diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b1e525fcca7c..233603ea5440 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python-mono-repo:latest - digest: sha256:957ddc3272f6b8058ff0ca2e8692b56f0a764d804fd577670385d54f19ee43d6 + digest: sha256:1f42c1d6b70210540f55110662ae80e22b03dfb897782b09e546148599d3336c diff --git a/.github/workflows/configure_release_please.yml b/.github/workflows/configure_release_please.yml index 07929edb0a8b..d3a3f2b53420 100644 --- a/.github/workflows/configure_release_please.yml +++ b/.github/workflows/configure_release_please.yml @@ -42,7 +42,7 @@ jobs: with: fetch-depth: 2 - name: Set up Python 3.11 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 - name: Run configure_release_please.py diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 32e91b43a909..498aeef0c4bb 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -19,7 +19,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox @@ -44,7 +44,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 07df25ffe0ea..3de33f190b14 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,7 +19,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index aa318acba43b..7d306909a816 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,7 
+43,7 @@ jobs: with: fetch-depth: 2 - name: Set up Python 3.10 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.10 - name: Install script dependencies diff --git a/.github/workflows/scripts.yml b/.github/workflows/scripts.yml index 5268282f8fe0..7621bf969ea1 100644 --- a/.github/workflows/scripts.yml +++ b/.github/workflows/scripts.yml @@ -19,7 +19,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install pytest diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 36e878edb62c..5c6379d5ccb0 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -22,7 +22,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -38,9 +38,9 @@ jobs: run: | ci/run_conditional_tests.sh - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-artifacts + name: coverage-artifact-${{ '{{' }} matrix.python {{ '}}' }} path: .coverage-${{ matrix.python }} prerelease: runs-on: ubuntu-latest @@ -56,7 +56,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -84,7 +84,7 @@ jobs: with: fetch-depth: 2 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Set number of files changes in packages directory @@ -97,12 +97,12 @@ jobs: python -m pip install coverage - name: Download coverage results if: ${{ steps.date.packages.num_files_changed > 0 }} - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage-artifacts path: .coverage-results/ - name: Report coverage results if: ${{ 
steps.date.packages.num_files_changed > 0 }} run: | - coverage combine .coverage-results/.coverage* + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* coverage report --show-missing --fail-under=100 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index f8278adb58a2..59a48aa2063f 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -262,9 +262,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/.release-please-manifest.json b/.release-please-manifest.json index dc66a872ba36..34e586fb9382 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,12 +1,13 @@ { - "packages/google-ai-generativelanguage": "0.3.5", - "packages/google-analytics-admin": "0.22.1", - "packages/google-analytics-data": "0.18.1", + "packages/google-ai-generativelanguage": "0.5.0", + "packages/google-analytics-admin": "0.22.2", + "packages/google-analytics-data": "0.18.3", + "packages/google-apps-meet": "0.1.1", "packages/google-apps-script-type": "0.3.5", "packages/google-area120-tables": "0.11.5", "packages/google-cloud-access-approval": "1.12.0", - "packages/google-cloud-advisorynotifications": "0.3.2", - "packages/google-cloud-alloydb": "0.3.5", + "packages/google-cloud-advisorynotifications": "0.3.3", + "packages/google-cloud-alloydb": "0.3.6", "packages/google-cloud-alloydb-connectors": "0.1.1", "packages/google-cloud-api-gateway": "1.8.0", "packages/google-cloud-api-keys": 
"0.5.5", @@ -15,11 +16,11 @@ "packages/google-cloud-appengine-admin": "1.10.0", "packages/google-cloud-appengine-logging": "1.4.0", "packages/google-cloud-artifact-registry": "1.10.0", - "packages/google-cloud-asset": "3.21.0", + "packages/google-cloud-asset": "3.23.0", "packages/google-cloud-assured-workloads": "1.11.0", "packages/google-cloud-automl": "2.12.0", "packages/google-cloud-bare-metal-solution": "1.6.0", - "packages/google-cloud-batch": "0.17.6", + "packages/google-cloud-batch": "0.17.9", "packages/google-cloud-beyondcorp-appconnections": "0.4.5", "packages/google-cloud-beyondcorp-appconnectors": "0.4.5", "packages/google-cloud-beyondcorp-appgateways": "0.4.5", @@ -34,52 +35,53 @@ "packages/google-cloud-bigquery-logging": "1.4.0", "packages/google-cloud-bigquery-migration": "0.11.3", "packages/google-cloud-bigquery-reservation": "1.12.0", - "packages/google-cloud-billing": "1.12.0", + "packages/google-cloud-billing": "1.12.1", "packages/google-cloud-billing-budgets": "1.13.0", "packages/google-cloud-binary-authorization": "1.9.0", "packages/google-cloud-build": "3.22.0", "packages/google-cloud-certificate-manager": "1.5.0", - "packages/google-cloud-channel": "1.17.0", + "packages/google-cloud-channel": "1.17.1", + "packages/google-cloud-cloudquotas": "0.1.0", "packages/google-cloud-commerce-consumer-procurement": "0.1.1", "packages/google-cloud-common": "1.3.0", "packages/google-cloud-compute": "1.15.0", "packages/google-cloud-confidentialcomputing": "0.4.3", - "packages/google-cloud-config": "0.1.2", + "packages/google-cloud-config": "0.1.3", "packages/google-cloud-contact-center-insights": "1.15.0", - "packages/google-cloud-container": "2.36.0", + "packages/google-cloud-container": "2.38.0", "packages/google-cloud-containeranalysis": "2.13.0", "packages/google-cloud-contentwarehouse": "0.7.3", "packages/google-cloud-data-fusion": "1.9.0", "packages/google-cloud-data-qna": "0.10.5", - "packages/google-cloud-datacatalog": "3.17.0", + 
"packages/google-cloud-datacatalog": "3.17.2", "packages/google-cloud-datacatalog-lineage": "0.3.2", "packages/google-cloud-dataflow-client": "0.8.6", "packages/google-cloud-dataform": "0.5.5", "packages/google-cloud-datalabeling": "1.9.0", - "packages/google-cloud-dataplex": "1.10.0", + "packages/google-cloud-dataplex": "1.11.0", "packages/google-cloud-dataproc": "5.8.0", "packages/google-cloud-dataproc-metastore": "1.14.0", "packages/google-cloud-datastream": "1.8.0", - "packages/google-cloud-deploy": "1.15.0", - "packages/google-cloud-dialogflow": "2.26.0", - "packages/google-cloud-dialogflow-cx": "1.30.0", - "packages/google-cloud-discoveryengine": "0.11.4", + "packages/google-cloud-deploy": "1.16.0", + "packages/google-cloud-dialogflow": "2.27.0", + "packages/google-cloud-dialogflow-cx": "1.30.1", + "packages/google-cloud-discoveryengine": "0.11.5", "packages/google-cloud-dlp": "3.14.0", "packages/google-cloud-dms": "1.8.0", - "packages/google-cloud-documentai": "2.21.0", + "packages/google-cloud-documentai": "2.21.1", "packages/google-cloud-domains": "1.6.0", "packages/google-cloud-edgecontainer": "0.5.4", - "packages/google-cloud-edgenetwork": "0.1.1", + "packages/google-cloud-edgenetwork": "0.1.2", "packages/google-cloud-enterpriseknowledgegraph": "0.3.5", "packages/google-cloud-essential-contacts": "1.6.0", "packages/google-cloud-eventarc": "1.10.0", "packages/google-cloud-eventarc-publishing": "0.6.5", "packages/google-cloud-filestore": "1.8.0", - "packages/google-cloud-functions": "1.14.0", + "packages/google-cloud-functions": "1.15.0", "packages/google-cloud-gke-backup": "0.5.3", "packages/google-cloud-gke-connect-gateway": "0.8.5", "packages/google-cloud-gke-hub": "1.11.0", - "packages/google-cloud-gke-multicloud": "0.6.4", + "packages/google-cloud-gke-multicloud": "0.6.5", "packages/google-cloud-gsuiteaddons": "0.3.4", "packages/google-cloud-iam": "2.13.0", "packages/google-cloud-iam-logging": "1.3.0", @@ -93,10 +95,10 @@ 
"packages/google-cloud-media-translation": "0.11.5", "packages/google-cloud-memcache": "1.8.0", "packages/google-cloud-migrationcenter": "0.1.3", - "packages/google-cloud-monitoring": "2.17.0", + "packages/google-cloud-monitoring": "2.18.0", "packages/google-cloud-monitoring-dashboards": "2.13.0", "packages/google-cloud-monitoring-metrics-scopes": "1.5.0", - "packages/google-cloud-netapp": "0.3.2", + "packages/google-cloud-netapp": "0.3.3", "packages/google-cloud-network-connectivity": "2.3.0", "packages/google-cloud-network-management": "1.12.0", "packages/google-cloud-network-security": "0.9.5", @@ -105,7 +107,7 @@ "packages/google-cloud-optimization": "1.7.0", "packages/google-cloud-orchestration-airflow": "1.10.0", "packages/google-cloud-os-config": "1.16.0", - "packages/google-cloud-os-login": "2.12.0", + "packages/google-cloud-os-login": "2.13.0", "packages/google-cloud-phishing-protection": "1.10.0", "packages/google-cloud-policy-troubleshooter": "1.10.0", "packages/google-cloud-policysimulator": "0.1.2", @@ -114,24 +116,25 @@ "packages/google-cloud-private-catalog": "0.9.5", "packages/google-cloud-public-ca": "0.3.5", "packages/google-cloud-rapidmigrationassessment": "0.1.3", - "packages/google-cloud-recaptcha-enterprise": "1.16.0", + "packages/google-cloud-recaptcha-enterprise": "1.16.1", "packages/google-cloud-recommendations-ai": "0.10.6", - "packages/google-cloud-recommender": "2.13.0", + "packages/google-cloud-recommender": "2.14.0", "packages/google-cloud-redis": "2.14.0", "packages/google-cloud-redis-cluster": "0.1.1", "packages/google-cloud-resource-manager": "1.11.0", "packages/google-cloud-resource-settings": "1.8.0", - "packages/google-cloud-retail": "1.17.0", + "packages/google-cloud-retail": "1.18.0", "packages/google-cloud-run": "0.10.1", "packages/google-cloud-scheduler": "2.12.0", "packages/google-cloud-secret-manager": "2.17.0", "packages/google-cloud-securesourcemanager": "0.1.1", "packages/google-cloud-securitycenter": "1.25.0", - 
"packages/google-cloud-securitycentermanagement": "0.1.0", + "packages/google-cloud-securitycentermanagement": "0.1.3", "packages/google-cloud-service-control": "1.10.0", "packages/google-cloud-service-directory": "1.10.0", "packages/google-cloud-service-management": "1.7.0", "packages/google-cloud-service-usage": "1.9.0", + "packages/google-cloud-servicehealth": "0.1.0", "packages/google-cloud-shell": "1.8.0", "packages/google-cloud-source-context": "1.5.0", "packages/google-cloud-speech": "2.23.0", @@ -139,19 +142,19 @@ "packages/google-cloud-storageinsights": "0.1.4", "packages/google-cloud-support": "0.1.3", "packages/google-cloud-talent": "2.12.0", - "packages/google-cloud-tasks": "2.15.0", + "packages/google-cloud-tasks": "2.15.1", "packages/google-cloud-telcoautomation": "0.1.2", - "packages/google-cloud-texttospeech": "2.15.0", + "packages/google-cloud-texttospeech": "2.15.1", "packages/google-cloud-tpu": "1.17.0", "packages/google-cloud-trace": "1.12.0", - "packages/google-cloud-translate": "3.13.0", + "packages/google-cloud-translate": "3.14.0", "packages/google-cloud-video-live-stream": "1.6.0", "packages/google-cloud-video-stitcher": "0.7.5", "packages/google-cloud-video-transcoder": "1.11.0", "packages/google-cloud-videointelligence": "2.12.0", "packages/google-cloud-vision": "3.5.0", "packages/google-cloud-vm-migration": "1.7.0", - "packages/google-cloud-vmwareengine": "1.2.0", + "packages/google-cloud-vmwareengine": "1.3.0", "packages/google-cloud-vpc-access": "1.9.0", "packages/google-cloud-webrisk": "1.13.0", "packages/google-cloud-websecurityscanner": "1.13.0", @@ -159,11 +162,12 @@ "packages/google-cloud-workstations": "0.5.2", "packages/google-geo-type": "0.3.4", "packages/google-maps-addressvalidation": "0.3.7", - "packages/google-maps-fleetengine": "0.1.2", - "packages/google-maps-fleetengine-delivery": "0.1.2", + "packages/google-maps-fleetengine": "0.1.4", + "packages/google-maps-fleetengine-delivery": "0.1.3", 
"packages/google-maps-mapsplatformdatasets": "0.3.3", - "packages/google-maps-places": "0.1.5", + "packages/google-maps-places": "0.1.6", "packages/google-maps-routing": "0.6.3", + "packages/google-shopping-css": "0.1.0", "packages/google-shopping-merchant-inventories": "0.1.1", "packages/google-shopping-merchant-reports": "0.1.1", "packages/google-shopping-type": "0.1.1", diff --git a/CHANGELOG.md b/CHANGELOG.md index cf94f333fa23..f97bb60f63d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,172 +2,177 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- -- [google-ai-generativelanguage==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) -- [google-analytics-admin==0.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) -- [google-apps-script-type==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) -- [google-area120-tables==0.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) -- [google-cloud-access-approval==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) -- [google-cloud-advisorynotifications==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) -- [google-cloud-alloydb-connectors==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) -- 
[google-cloud-api-gateway==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) -- [google-cloud-api-keys==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) -- [google-cloud-apigee-connect==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) -- [google-cloud-apigee-registry==0.6.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- [google-cloud-appengine-admin==1.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) -- [google-cloud-appengine-logging==1.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) -- [google-cloud-artifact-registry==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) -- [google-cloud-assured-workloads==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) -- [google-cloud-automl==2.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) -- [google-cloud-bare-metal-solution==1.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) -- [google-cloud-beyondcorp-appconnections==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) -- 
[google-cloud-beyondcorp-appconnectors==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) -- [google-cloud-beyondcorp-appgateways==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) -- [google-cloud-beyondcorp-clientconnectorservices==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) -- [google-cloud-beyondcorp-clientgateways==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) -- [google-cloud-bigquery-analyticshub==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) -- [google-cloud-bigquery-biglake==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) -- [google-cloud-bigquery-connection==1.13.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) -- [google-cloud-bigquery-data-exchange==0.5.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) -- [google-cloud-bigquery-datapolicies==0.6.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) -- [google-cloud-bigquery-logging==1.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) -- [google-cloud-bigquery-migration==0.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) -- 
[google-cloud-bigquery-reservation==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) -- [google-cloud-billing-budgets==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) -- [google-cloud-billing==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) -- [google-cloud-binary-authorization==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) -- [google-cloud-certificate-manager==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) -- [google-cloud-channel==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-commerce-consumer-procurement==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) -- [google-cloud-common==1.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) -- [google-cloud-compute==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) -- [google-cloud-confidentialcomputing==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) -- [google-cloud-config==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) -- [google-cloud-contact-center-insights==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- 
[google-cloud-container==2.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) -- [google-cloud-containeranalysis==2.12.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) -- [google-cloud-contentwarehouse==0.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) -- [google-cloud-data-fusion==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) -- [google-cloud-data-qna==0.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) -- [google-cloud-datacatalog-lineage==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) -- [google-cloud-datacatalog==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) -- [google-cloud-dataflow-client==0.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) -- [google-cloud-dataform==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) -- [google-cloud-datalabeling==1.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) -- [google-cloud-dataplex==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) -- [google-cloud-dataproc-metastore==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) -- 
[google-cloud-datastream==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- [google-cloud-deploy==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) -- [google-cloud-dialogflow-cx==1.29.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) -- [google-cloud-dms==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.20.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) -- [google-cloud-domains==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) -- [google-cloud-edgecontainer==0.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) -- [google-cloud-enterpriseknowledgegraph==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) -- [google-cloud-essential-contacts==1.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) -- 
[google-cloud-eventarc-publishing==0.6.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) -- [google-cloud-eventarc==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) -- [google-cloud-filestore==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) -- [google-cloud-functions==1.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gke-backup==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) -- [google-cloud-gke-connect-gateway==0.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) -- [google-cloud-gke-hub==1.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) -- [google-cloud-gsuiteaddons==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) -- [google-cloud-iam-logging==1.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) -- [google-cloud-iam==2.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) -- [google-cloud-iap==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) -- [google-cloud-ids==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) -- 
[google-cloud-kms-inventory==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==2.19.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) -- [google-cloud-language==2.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) -- [google-cloud-life-sciences==0.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) -- [google-cloud-managed-identities==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) -- [google-cloud-media-translation==0.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) -- [google-cloud-memcache==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) -- [google-cloud-migrationcenter==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) -- [google-cloud-monitoring-dashboards==2.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) -- [google-cloud-monitoring-metrics-scopes==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) -- [google-cloud-monitoring==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- [google-cloud-netapp==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) -- [google-cloud-network-connectivity==2.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) -- 
[google-cloud-network-management==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) -- [google-cloud-network-security==0.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) -- [google-cloud-network-services==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) -- [google-cloud-notebooks==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) -- [google-cloud-optimization==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-orchestration-airflow==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) -- [google-cloud-os-config==1.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) -- [google-cloud-os-login==2.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) -- [google-cloud-phishing-protection==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) -- [google-cloud-policy-troubleshooter==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) -- [google-cloud-policysimulator==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) -- [google-cloud-policytroubleshooter-iam==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) -- [google-cloud-private-ca==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) -- 
[google-cloud-private-catalog==0.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) -- [google-cloud-public-ca==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) -- [google-cloud-rapidmigrationassessment==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) -- [google-cloud-recommendations-ai==0.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) -- [google-cloud-recommender==2.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) -- [google-cloud-redis-cluster==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) -- [google-cloud-redis==2.13.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) -- [google-cloud-resource-manager==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) -- [google-cloud-resource-settings==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) -- [google-cloud-retail==1.16.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) -- [google-cloud-run==0.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) -- [google-cloud-scheduler==2.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) -- 
[google-cloud-secret-manager==2.16.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) -- [google-cloud-securesourcemanager==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) -- [google-cloud-securitycenter==1.24.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) -- [google-cloud-service-control==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) -- [google-cloud-service-directory==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- [google-cloud-service-management==1.6.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) -- [google-cloud-service-usage==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) -- [google-cloud-shell==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) -- [google-cloud-source-context==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) -- [google-cloud-speech==2.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) -- [google-cloud-storage-transfer==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) -- [google-cloud-storageinsights==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) -- [google-cloud-support==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) -- 
[google-cloud-talent==2.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) -- [google-cloud-tasks==2.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) -- [google-cloud-telcoautomation==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) -- [google-cloud-tpu==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) -- [google-cloud-trace==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) -- [google-cloud-translate==3.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) -- [google-cloud-video-live-stream==1.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) -- [google-cloud-video-stitcher==0.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) -- [google-cloud-video-transcoder==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) -- [google-cloud-videointelligence==2.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) -- [google-cloud-vision==3.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-vm-migration==1.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) -- 
[google-cloud-vmwareengine==1.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) -- [google-cloud-vpc-access==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) -- [google-cloud-webrisk==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) -- [google-cloud-websecurityscanner==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) -- [google-cloud-workflows==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) -- [google-cloud-workstations==0.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) -- [google-geo-type==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) -- [google-maps-addressvalidation==0.3.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) -- [google-maps-fleetengine==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) -- [google-maps-mapsplatformdatasets==0.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routing==0.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) -- 
[google-shopping-merchant-inventories==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) -- [google-shopping-merchant-reports==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) -- [google-shopping-type==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) -- [grafeas==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) +- [google-ai-generativelanguage==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-analytics-admin==0.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) +- [google-analytics-data==0.18.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-apps-meet==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) +- [google-apps-script-type==0.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) +- [google-area120-tables==0.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) +- [google-cloud-access-approval==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) +- [google-cloud-advisorynotifications==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) +- [google-cloud-alloydb-connectors==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) +- 
[google-cloud-alloydb==0.3.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-api-gateway==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) +- [google-cloud-api-keys==0.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) +- [google-cloud-apigee-connect==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) +- [google-cloud-apigee-registry==0.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) +- [google-cloud-appengine-admin==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) +- [google-cloud-appengine-logging==1.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) +- [google-cloud-artifact-registry==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) +- [google-cloud-asset==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-assured-workloads==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) +- [google-cloud-automl==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) +- [google-cloud-bare-metal-solution==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) +- [google-cloud-batch==0.17.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- 
[google-cloud-beyondcorp-appconnections==0.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnectors==0.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) +- [google-cloud-beyondcorp-appgateways==0.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) +- [google-cloud-beyondcorp-clientconnectorservices==0.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) +- [google-cloud-beyondcorp-clientgateways==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) +- [google-cloud-bigquery-analyticshub==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) +- [google-cloud-bigquery-biglake==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) +- [google-cloud-bigquery-connection==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) +- [google-cloud-bigquery-data-exchange==0.5.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) +- [google-cloud-bigquery-datapolicies==0.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- 
[google-cloud-bigquery-logging==1.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) +- [google-cloud-bigquery-migration==0.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) +- [google-cloud-bigquery-reservation==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) +- [google-cloud-billing-budgets==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) +- [google-cloud-billing==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) +- [google-cloud-binary-authorization==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) +- [google-cloud-build==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- [google-cloud-certificate-manager==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) +- [google-cloud-channel==1.17.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) +- [google-cloud-cloudquotas==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) +- [google-cloud-commerce-consumer-procurement==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) +- [google-cloud-common==1.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) +- [google-cloud-compute==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) +- 
[google-cloud-confidentialcomputing==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) +- [google-cloud-config==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) +- [google-cloud-contact-center-insights==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) +- [google-cloud-container==2.38.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-containeranalysis==2.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) +- [google-cloud-contentwarehouse==0.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) +- [google-cloud-data-fusion==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) +- [google-cloud-data-qna==0.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) +- [google-cloud-datacatalog-lineage==0.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) +- [google-cloud-datacatalog==3.17.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) +- [google-cloud-dataflow-client==0.8.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) +- [google-cloud-dataform==0.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) +- [google-cloud-datalabeling==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) +- 
[google-cloud-dataplex==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) +- [google-cloud-dataproc-metastore==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) +- [google-cloud-dataproc==5.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-datastream==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) +- [google-cloud-deploy==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-dialogflow-cx==1.30.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) +- [google-cloud-dialogflow==2.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-discoveryengine==0.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dms==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) +- [google-cloud-documentai==2.21.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-domains==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) +- [google-cloud-edgecontainer==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- 
[google-cloud-enterpriseknowledgegraph==0.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) +- [google-cloud-essential-contacts==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) +- [google-cloud-eventarc-publishing==0.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) +- [google-cloud-eventarc==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) +- [google-cloud-filestore==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) +- [google-cloud-functions==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-gke-backup==0.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) +- [google-cloud-gke-connect-gateway==0.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) +- [google-cloud-gke-hub==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gsuiteaddons==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) +- [google-cloud-iam-logging==1.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) +- [google-cloud-iam==2.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) +- 
[google-cloud-iap==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) +- [google-cloud-ids==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) +- [google-cloud-kms-inventory==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) +- [google-cloud-kms==2.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-language==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) +- [google-cloud-life-sciences==0.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) +- [google-cloud-managed-identities==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) +- [google-cloud-media-translation==0.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) +- [google-cloud-memcache==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) +- [google-cloud-migrationcenter==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) +- [google-cloud-monitoring-dashboards==2.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) +- [google-cloud-monitoring-metrics-scopes==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) +- [google-cloud-monitoring==2.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) +- 
[google-cloud-netapp==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-network-connectivity==2.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) +- [google-cloud-network-management==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) +- [google-cloud-network-security==0.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) +- [google-cloud-network-services==0.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) +- [google-cloud-notebooks==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) +- [google-cloud-optimization==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-os-config==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) +- [google-cloud-os-login==2.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) +- [google-cloud-phishing-protection==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) +- [google-cloud-policy-troubleshooter==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) +- [google-cloud-policysimulator==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) +- 
[google-cloud-policytroubleshooter-iam==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) +- [google-cloud-private-ca==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) +- [google-cloud-private-catalog==0.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) +- [google-cloud-public-ca==0.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) +- [google-cloud-rapidmigrationassessment==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recommendations-ai==0.10.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) +- [google-cloud-recommender==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) +- [google-cloud-redis-cluster==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) +- [google-cloud-redis==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) +- [google-cloud-resource-manager==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) +- [google-cloud-resource-settings==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) +- [google-cloud-retail==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) +- 
[google-cloud-run==0.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) +- [google-cloud-scheduler==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) +- [google-cloud-secret-manager==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) +- [google-cloud-securesourcemanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) +- [google-cloud-securitycenter==1.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) +- [google-cloud-securitycentermanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) +- [google-cloud-service-control==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) +- [google-cloud-service-directory==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) +- [google-cloud-service-management==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-usage==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) +- [google-cloud-servicehealth==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) +- [google-cloud-shell==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) +- [google-cloud-source-context==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) +- 
[google-cloud-speech==2.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) +- [google-cloud-storage-transfer==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) +- [google-cloud-storageinsights==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) +- [google-cloud-support==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) +- [google-cloud-talent==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) +- [google-cloud-tasks==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) +- [google-cloud-telcoautomation==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) +- [google-cloud-texttospeech==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-tpu==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) +- [google-cloud-trace==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) +- [google-cloud-translate==3.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) +- [google-cloud-video-live-stream==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) +- [google-cloud-video-stitcher==0.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) +- 
[google-cloud-video-transcoder==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) +- [google-cloud-videointelligence==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) +- [google-cloud-vision==3.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) +- [google-cloud-vm-migration==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) +- [google-cloud-vmwareengine==1.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) +- [google-cloud-vpc-access==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) +- [google-cloud-webrisk==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) +- [google-cloud-websecurityscanner==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) +- [google-cloud-workflows==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) +- [google-cloud-workstations==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) +- [google-geo-type==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) +- [google-maps-addressvalidation==0.3.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) +- [google-maps-fleetengine-delivery==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- 
[google-maps-fleetengine==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) +- [google-maps-mapsplatformdatasets==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) +- [google-maps-places==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routing==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) +- [google-shopping-css==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-merchant-inventories==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) +- [google-shopping-merchant-reports==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) +- [google-shopping-type==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) +- [grafeas==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) diff --git a/containers/python-bootstrap-container/entrypoint.sh b/containers/python-bootstrap-container/entrypoint.sh index 978c57172fc5..6021572b23c3 100755 --- a/containers/python-bootstrap-container/entrypoint.sh +++ b/containers/python-bootstrap-container/entrypoint.sh @@ -22,6 +22,16 @@ function save_to_temp_then_file() { mv -f $TEMP_FILE "${1}" } +# replace_prefix STR FROM_PREFIX TO_PREFIX +# Echoes STR with any leading prefix FROM_PREFIX replaced by TO_PREFIX. 
+function replace_prefix () { + local STR="$1" + local PREFIX_FROM="$2" + local PREFIX_TO="$3" + local STEM=${STR#${PREFIX_FROM}} + [[ "${STR}" == "${STEM}" ]] && echo "${STR}" || echo "${PREFIX_TO}${STEM}" +} + MONO_REPO_NAME="google-cloud-python" WORKSPACE_DIR="/workspace" @@ -36,8 +46,8 @@ cd "$WORKSPACE_DIR/$MONO_REPO_NAME/containers/python-bootstrap-container" API_VERSION="$(echo $API_ID | sed 's/.*\.//')" # API_ID has the form google.cloud.*.vX or `google.*.*.vX` -# Replace `.`` with `-` -FOLDER_NAME="$(echo $API_ID | sed -E 's/\./-/g')" +# It forms the basis for FOLDER_NAME, which will be further modified in what follows. +FOLDER_NAME="${API_ID}" # if API_VERSION does not contain numbers, set API_VERSION to empty string if [[ ! $API_VERSION =~ [0-9] ]]; then @@ -46,9 +56,22 @@ else # Remove the trailing version from the FOLDER_NAME` # for `google.cloud.workflows.v1` # the folder should be `google-cloud-workflows` - FOLDER_NAME="$(echo $FOLDER_NAME | sed 's/-[^-]*$//')" + FOLDER_NAME="$(echo $FOLDER_NAME | sed 's@\.[^.]*$@@')" fi +# The directory in googleapis/googleapis-gen to configure in .OwlBot.yaml. +# Replace '.' with '/' +API_PATH="$(echo ${FOLDER_NAME} | sed -E 's@\.@/@g')" + +# Replace `.`` with `-` +FOLDER_NAME="$(echo ${FOLDER_NAME} | sed -E 's/\./-/g')" + +# Since we map protobuf packages google.protobuf.* to Python packages +# google.cloud.* (see +# https://github.com/googleapis/gapic-generator-python/issues/1899), ensure that +# that the PyPI package name reflects the Python package structure. 
+FOLDER_NAME="$(replace_prefix "${FOLDER_NAME}" google-api- google-cloud- )" + # Create the folder mkdir -p "$WORKSPACE_DIR/$MONO_REPO_NAME/packages/$FOLDER_NAME" @@ -68,8 +91,6 @@ else cp ".OwlBot.yaml" "${WORKSPACE_DIR}/${MONO_REPO_NAME}/packages/${FOLDER_NAME}/.OwlBot.yaml" fi -API_PATH="$(echo $FOLDER_NAME | sed -E 's/\-/\//g')" - # Update apiPath in .OwlBot.yaml sed -i -e "s|apiPath|$API_PATH|" "${WORKSPACE_DIR}/${MONO_REPO_NAME}/packages/${FOLDER_NAME}/.OwlBot.yaml" diff --git a/packages/google-ai-generativelanguage/.repo-metadata.json b/packages/google-ai-generativelanguage/.repo-metadata.json index bc3075ddf66f..3ae73c9657d8 100644 --- a/packages/google-ai-generativelanguage/.repo-metadata.json +++ b/packages/google-ai-generativelanguage/.repo-metadata.json @@ -11,7 +11,7 @@ "repo": "googleapis/google-cloud-python", "distribution_name": "google-ai-generativelanguage", "api_id": "generativelanguage.googleapis.com", - "default_version": "v1beta3", + "default_version": "v1beta", "codeowner_team": "", "api_shortname": "generativelanguage" } diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index 5f1b80e625f3..31ad2045365c 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,43 @@ # Changelog +## [0.5.0](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.4.1...google-ai-generativelanguage-v0.5.0) (2024-01-24) + + +### âš  BREAKING CHANGES + +* Fix content.proto's Schema - `type` should be required + +### Features + +* Update GenAI libraries to include input_safety_feedback ([d2004d4](https://github.com/googleapis/google-cloud-python/commit/d2004d4a1c95333017b585ba905d5e0c4af45776)) + + +### Bug Fixes + +* Fix content.proto's Schema - `type` should be required ([d2004d4](https://github.com/googleapis/google-cloud-python/commit/d2004d4a1c95333017b585ba905d5e0c4af45776)) + + +### 
Documentation + +* Minor docs updates ([d2004d4](https://github.com/googleapis/google-cloud-python/commit/d2004d4a1c95333017b585ba905d5e0c4af45776)) +* Update summary, improve description for `title` in `EmbedContentRequest` ([d2004d4](https://github.com/googleapis/google-cloud-python/commit/d2004d4a1c95333017b585ba905d5e0c4af45776)) + +## [0.4.1](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.4.0...google-ai-generativelanguage-v0.4.1) (2024-01-22) + + +### Documentation + +* [google-ai-generativelanguage] Fixed minor documentation typos for field `function_declarations` in message `google.ai.generativelanguage.v1beta.Tool` ([#12206](https://github.com/googleapis/google-cloud-python/issues/12206)) ([52957f3](https://github.com/googleapis/google-cloud-python/commit/52957f38e2d5dca5e873cfc7239a6ce469ed541f)) + +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.3.5...google-ai-generativelanguage-v0.4.0) (2023-12-09) + + +### Features + +* Add v1, contains only GenerativeService, nothing else ([23d8814](https://github.com/googleapis/google-cloud-python/commit/23d8814baa6288d94484d52a98714fd32755ada3)) +* Add v1beta, adds GenerativeService and RetrievalService ([23d8814](https://github.com/googleapis/google-cloud-python/commit/23d8814baa6288d94484d52a98714fd32755ada3)) +* Set `google.ai.generativelanguage_v1beta` as the default import ([23d8814](https://github.com/googleapis/google-cloud-python/commit/23d8814baa6288d94484d52a98714fd32755ada3)) + ## [0.3.5](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.3.4...google-ai-generativelanguage-v0.3.5) (2023-12-07) diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1/generative_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/generative_service.rst new file mode 100644 index 000000000000..ce358b56764b --- /dev/null +++ 
b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/generative_service.rst @@ -0,0 +1,6 @@ +GenerativeService +----------------------------------- + +.. automodule:: google.ai.generativelanguage_v1.services.generative_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1/model_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/model_service.rst new file mode 100644 index 000000000000..1ec9fb6f5766 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/model_service.rst @@ -0,0 +1,10 @@ +ModelService +------------------------------ + +.. automodule:: google.ai.generativelanguage_v1.services.model_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1.services.model_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/services_.rst new file mode 100644 index 000000000000..988dccd7ad40 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/services_.rst @@ -0,0 +1,7 @@ +Services for Google Ai Generativelanguage v1 API +================================================ +.. toctree:: + :maxdepth: 2 + + generative_service + model_service diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1/types_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/types_.rst new file mode 100644 index 000000000000..c3e9aef89c0e --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ai Generativelanguage v1 API +============================================= + +.. 
automodule:: google.ai.generativelanguage_v1.types + :members: + :show-inheritance: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/discuss_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/discuss_service.rst new file mode 100644 index 000000000000..f66ee1c4e90c --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/discuss_service.rst @@ -0,0 +1,6 @@ +DiscussService +-------------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta.services.discuss_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/generative_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/generative_service.rst new file mode 100644 index 000000000000..1e19732947cb --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/generative_service.rst @@ -0,0 +1,6 @@ +GenerativeService +----------------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta.services.generative_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/model_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/model_service.rst new file mode 100644 index 000000000000..f656fafbec03 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/model_service.rst @@ -0,0 +1,10 @@ +ModelService +------------------------------ + +.. automodule:: google.ai.generativelanguage_v1beta.services.model_service + :members: + :inherited-members: + +.. 
automodule:: google.ai.generativelanguage_v1beta.services.model_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/permission_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/permission_service.rst new file mode 100644 index 000000000000..9c14fefd538a --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/permission_service.rst @@ -0,0 +1,10 @@ +PermissionService +----------------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta.services.permission_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1beta.services.permission_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/retriever_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/retriever_service.rst new file mode 100644 index 000000000000..704e781a41fc --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/retriever_service.rst @@ -0,0 +1,10 @@ +RetrieverService +---------------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta.services.retriever_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1beta.services.retriever_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst new file mode 100644 index 000000000000..2826bf7a6a6a --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst @@ -0,0 +1,11 @@ +Services for Google Ai Generativelanguage v1beta API +==================================================== +.. 
toctree:: + :maxdepth: 2 + + discuss_service + generative_service + model_service + permission_service + retriever_service + text_service diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/text_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/text_service.rst new file mode 100644 index 000000000000..4b17617a0273 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/text_service.rst @@ -0,0 +1,6 @@ +TextService +----------------------------- + +.. automodule:: google.ai.generativelanguage_v1beta.services.text_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/types_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/types_.rst new file mode 100644 index 000000000000..a5ac73b8a0d5 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ai Generativelanguage v1beta API +================================================= + +.. automodule:: google.ai.generativelanguage_v1beta.types + :members: + :show-inheritance: diff --git a/packages/google-ai-generativelanguage/docs/index.rst b/packages/google-ai-generativelanguage/docs/index.rst index 51e1b5474cd2..5688bf71543b 100644 --- a/packages/google-ai-generativelanguage/docs/index.rst +++ b/packages/google-ai-generativelanguage/docs/index.rst @@ -3,7 +3,7 @@ .. include:: multiprocessing.rst This package includes clients for multiple versions of Generative Language API. -By default, you will get version ``generativelanguage_v1beta3``. +By default, you will get version ``generativelanguage_v1beta``. API Reference @@ -11,8 +11,16 @@ API Reference .. toctree:: :maxdepth: 2 - generativelanguage_v1beta3/services_ - generativelanguage_v1beta3/types_ + generativelanguage_v1beta/services_ + generativelanguage_v1beta/types_ + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + generativelanguage_v1/services_ + generativelanguage_v1/types_ API Reference ------------- @@ -22,6 +30,14 @@ API Reference generativelanguage_v1beta2/services_ generativelanguage_v1beta2/types_ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + generativelanguage_v1beta3/services_ + generativelanguage_v1beta3/types_ + Changelog --------- diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index f81250e3fbf4..8201c76ab661 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -18,35 +18,60 @@ __version__ = package_version.__version__ -from google.ai.generativelanguage_v1beta3.services.discuss_service.async_client import ( +from google.ai.generativelanguage_v1beta.services.discuss_service.async_client import ( DiscussServiceAsyncClient, ) -from google.ai.generativelanguage_v1beta3.services.discuss_service.client import ( +from google.ai.generativelanguage_v1beta.services.discuss_service.client import ( DiscussServiceClient, ) -from google.ai.generativelanguage_v1beta3.services.model_service.async_client import ( +from google.ai.generativelanguage_v1beta.services.generative_service.async_client import ( + GenerativeServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.generative_service.client import ( + GenerativeServiceClient, +) +from google.ai.generativelanguage_v1beta.services.model_service.async_client import ( ModelServiceAsyncClient, ) -from google.ai.generativelanguage_v1beta3.services.model_service.client import ( +from google.ai.generativelanguage_v1beta.services.model_service.client import ( ModelServiceClient, ) -from google.ai.generativelanguage_v1beta3.services.permission_service.async_client import ( +from 
google.ai.generativelanguage_v1beta.services.permission_service.async_client import ( PermissionServiceAsyncClient, ) -from google.ai.generativelanguage_v1beta3.services.permission_service.client import ( +from google.ai.generativelanguage_v1beta.services.permission_service.client import ( PermissionServiceClient, ) -from google.ai.generativelanguage_v1beta3.services.text_service.async_client import ( +from google.ai.generativelanguage_v1beta.services.retriever_service.async_client import ( + RetrieverServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.retriever_service.client import ( + RetrieverServiceClient, +) +from google.ai.generativelanguage_v1beta.services.text_service.async_client import ( TextServiceAsyncClient, ) -from google.ai.generativelanguage_v1beta3.services.text_service.client import ( +from google.ai.generativelanguage_v1beta.services.text_service.client import ( TextServiceClient, ) -from google.ai.generativelanguage_v1beta3.types.citation import ( +from google.ai.generativelanguage_v1beta.types.citation import ( CitationMetadata, CitationSource, ) -from google.ai.generativelanguage_v1beta3.types.discuss_service import ( +from google.ai.generativelanguage_v1beta.types.content import ( + Blob, + Content, + FunctionCall, + FunctionDeclaration, + FunctionResponse, + GroundingPassage, + GroundingPassages, + Part, + Schema, + Tool, + Type, +) +from google.ai.generativelanguage_v1beta.types.discuss_service import ( CountMessageTokensRequest, CountMessageTokensResponse, Example, @@ -55,8 +80,27 @@ Message, MessagePrompt, ) -from google.ai.generativelanguage_v1beta3.types.model import Model -from google.ai.generativelanguage_v1beta3.types.model_service import ( +from google.ai.generativelanguage_v1beta.types.generative_service import ( + AttributionSourceId, + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + 
EmbedContentResponse, + GenerateAnswerRequest, + GenerateAnswerResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + GroundingAttribution, + SemanticRetrieverConfig, + TaskType, +) +from google.ai.generativelanguage_v1beta.types.model import Model +from google.ai.generativelanguage_v1beta.types.model_service import ( CreateTunedModelMetadata, CreateTunedModelRequest, DeleteTunedModelRequest, @@ -68,8 +112,8 @@ ListTunedModelsResponse, UpdateTunedModelRequest, ) -from google.ai.generativelanguage_v1beta3.types.permission import Permission -from google.ai.generativelanguage_v1beta3.types.permission_service import ( +from google.ai.generativelanguage_v1beta.types.permission import Permission +from google.ai.generativelanguage_v1beta.types.permission_service import ( CreatePermissionRequest, DeletePermissionRequest, GetPermissionRequest, @@ -79,14 +123,54 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) -from google.ai.generativelanguage_v1beta3.types.safety import ( +from google.ai.generativelanguage_v1beta.types.retriever import ( + Chunk, + ChunkData, + Condition, + Corpus, + CustomMetadata, + Document, + MetadataFilter, + StringList, +) +from google.ai.generativelanguage_v1beta.types.retriever_service import ( + BatchCreateChunksRequest, + BatchCreateChunksResponse, + BatchDeleteChunksRequest, + BatchUpdateChunksRequest, + BatchUpdateChunksResponse, + CreateChunkRequest, + CreateCorpusRequest, + CreateDocumentRequest, + DeleteChunkRequest, + DeleteCorpusRequest, + DeleteDocumentRequest, + GetChunkRequest, + GetCorpusRequest, + GetDocumentRequest, + ListChunksRequest, + ListChunksResponse, + ListCorporaRequest, + ListCorporaResponse, + ListDocumentsRequest, + ListDocumentsResponse, + QueryCorpusRequest, + QueryCorpusResponse, + QueryDocumentRequest, + QueryDocumentResponse, + RelevantChunk, + UpdateChunkRequest, + UpdateCorpusRequest, + UpdateDocumentRequest, +) +from google.ai.generativelanguage_v1beta.types.safety import ( 
ContentFilter, HarmCategory, SafetyFeedback, SafetyRating, SafetySetting, ) -from google.ai.generativelanguage_v1beta3.types.text_service import ( +from google.ai.generativelanguage_v1beta.types.text_service import ( BatchEmbedTextRequest, BatchEmbedTextResponse, CountTextTokensRequest, @@ -99,7 +183,7 @@ TextCompletion, TextPrompt, ) -from google.ai.generativelanguage_v1beta3.types.tuned_model import ( +from google.ai.generativelanguage_v1beta.types.tuned_model import ( Dataset, Hyperparameters, TunedModel, @@ -113,14 +197,29 @@ __all__ = ( "DiscussServiceClient", "DiscussServiceAsyncClient", + "GenerativeServiceClient", + "GenerativeServiceAsyncClient", "ModelServiceClient", "ModelServiceAsyncClient", "PermissionServiceClient", "PermissionServiceAsyncClient", + "RetrieverServiceClient", + "RetrieverServiceAsyncClient", "TextServiceClient", "TextServiceAsyncClient", "CitationMetadata", "CitationSource", + "Blob", + "Content", + "FunctionCall", + "FunctionDeclaration", + "FunctionResponse", + "GroundingPassage", + "GroundingPassages", + "Part", + "Schema", + "Tool", + "Type", "CountMessageTokensRequest", "CountMessageTokensResponse", "Example", @@ -128,6 +227,23 @@ "GenerateMessageResponse", "Message", "MessagePrompt", + "AttributionSourceId", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "Candidate", + "ContentEmbedding", + "CountTokensRequest", + "CountTokensResponse", + "EmbedContentRequest", + "EmbedContentResponse", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerationConfig", + "GroundingAttribution", + "SemanticRetrieverConfig", + "TaskType", "Model", "CreateTunedModelMetadata", "CreateTunedModelRequest", @@ -148,6 +264,42 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "Chunk", + "ChunkData", + "Condition", + "Corpus", + "CustomMetadata", + "Document", + "MetadataFilter", + "StringList", + "BatchCreateChunksRequest", + 
"BatchCreateChunksResponse", + "BatchDeleteChunksRequest", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "CreateChunkRequest", + "CreateCorpusRequest", + "CreateDocumentRequest", + "DeleteChunkRequest", + "DeleteCorpusRequest", + "DeleteDocumentRequest", + "GetChunkRequest", + "GetCorpusRequest", + "GetDocumentRequest", + "ListChunksRequest", + "ListChunksResponse", + "ListCorporaRequest", + "ListCorporaResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", + "QueryCorpusRequest", + "QueryCorpusResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "RelevantChunk", + "UpdateChunkRequest", + "UpdateCorpusRequest", + "UpdateDocumentRequest", "ContentFilter", "SafetyFeedback", "SafetyRating", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 288d10b11145..3754937a30b0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py new file mode 100644 index 000000000000..ee4612eef573 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ai.generativelanguage_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.generative_service import ( + GenerativeServiceAsyncClient, + GenerativeServiceClient, +) +from .services.model_service import ModelServiceAsyncClient, ModelServiceClient +from .types.citation import CitationMetadata, CitationSource +from .types.content import Blob, Content, Part +from .types.generative_service import ( + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + EmbedContentResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + TaskType, +) +from .types.model import Model +from .types.model_service import GetModelRequest, ListModelsRequest, ListModelsResponse +from .types.safety import HarmCategory, SafetyRating, SafetySetting + +__all__ = ( + "GenerativeServiceAsyncClient", + "ModelServiceAsyncClient", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "Blob", + "Candidate", + "CitationMetadata", + "CitationSource", + "Content", + "ContentEmbedding", + "CountTokensRequest", + "CountTokensResponse", + "EmbedContentRequest", + "EmbedContentResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerationConfig", + "GenerativeServiceClient", + "GetModelRequest", + "HarmCategory", + "ListModelsRequest", + "ListModelsResponse", + "Model", + "ModelServiceClient", + "Part", + "SafetyRating", + "SafetySetting", + "TaskType", +) diff --git 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_metadata.json new file mode 100644 index 000000000000..77717e17c5dc --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_metadata.json @@ -0,0 +1,152 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ai.generativelanguage_v1", + "protoPackage": "google.ai.generativelanguage.v1", + "schema": "1.0", + "services": { + "GenerativeService": { + "clients": { + "grpc": { + "libraryClient": "GenerativeServiceClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GenerativeServiceAsyncClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + }, + "rest": { + "libraryClient": "GenerativeServiceClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + } + } + }, + "ModelService": { + "clients": { 
+ "grpc": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ModelServiceAsyncClient", + "rpcs": { + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + } + } + }, + "rest": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py new file mode 100644 index 000000000000..3754937a30b0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/py.typed b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/py.typed new file mode 100644 index 000000000000..38773eee6363 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. 
+# The google-ai-generativelanguage package uses inline types. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/__init__.py new file mode 100644 index 000000000000..1e92ad575a7b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import GenerativeServiceAsyncClient +from .client import GenerativeServiceClient + +__all__ = ( + "GenerativeServiceClient", + "GenerativeServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py new file mode 100644 index 000000000000..155abc59f941 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py @@ -0,0 +1,1069 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + AsyncIterable, + Awaitable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1.types import content +from google.ai.generativelanguage_v1.types import content as gag_content +from google.ai.generativelanguage_v1.types import generative_service + +from .client import GenerativeServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport + + +class GenerativeServiceAsyncClient: + """API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. 
+ """ + + _client: GenerativeServiceClient + + DEFAULT_ENDPOINT = GenerativeServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(GenerativeServiceClient.model_path) + parse_model_path = staticmethod(GenerativeServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + GenerativeServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GenerativeServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GenerativeServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + GenerativeServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GenerativeServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GenerativeServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(GenerativeServiceClient.common_project_path) + parse_common_project_path = staticmethod( + GenerativeServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(GenerativeServiceClient.common_location_path) + parse_common_location_path = staticmethod( + GenerativeServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceAsyncClient: The constructed client. 
+ """ + return GenerativeServiceClient.from_service_account_info.__func__(GenerativeServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceAsyncClient: The constructed client. + """ + return GenerativeServiceClient.from_service_account_file.__func__(GenerativeServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GenerativeServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GenerativeServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GenerativeServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(GenerativeServiceClient).get_transport_class, type(GenerativeServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, GenerativeServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the generative service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.GenerativeServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GenerativeServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Generates a response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = await client.generate_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.GenerateContentRequest, dict]]): + The request object. Request to generate a completion from + the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. 
They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.GenerateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def stream_generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: + r"""Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = await client.stream_generate_content(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.GenerateContentRequest, dict]]): + The request object. Request to generate a completion from + the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.GenerateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stream_generate_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def embed_content( + self, + request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None, + *, + model: Optional[str] = None, + content: Optional[gag_content.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Generates an embedding from the model given an input + ``Content``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_embed_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.EmbedContentRequest, dict]]): + The request object. Request containing the ``Content`` for the model to + embed. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`google.ai.generativelanguage_v1.types.Content`): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.EmbedContentResponse: + The response to an EmbedContentRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.EmbedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.embed_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_embed_contents( + self, + request: Optional[ + Union[generative_service.BatchEmbedContentsRequest, dict] + ] = None, + *, + model: Optional[str] = None, + requests: Optional[ + MutableSequence[generative_service.EmbedContentRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = await client.batch_embed_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.BatchEmbedContentsRequest, dict]]): + The request object. Batch request to get embeddings from + the model for a list of prompts. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.ai.generativelanguage_v1.types.EmbedContentRequest]`): + Required. Embed requests for the batch. The model in + each of these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.BatchEmbedContentsResponse: + The response to a BatchEmbedContentsRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.BatchEmbedContentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_embed_contents, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def count_tokens( + self, + request: Optional[Union[generative_service.CountTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.CountTokensResponse: + r"""Runs a model's tokenizer on input content and returns + the token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_count_tokens(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = await client.count_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.CountTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ contents (:class:`MutableSequence[google.ai.generativelanguage_v1.types.Content]`): + Required. The input given to the + model as a prompt. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.CountTokensResponse: + A response from CountTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.CountTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.count_tokens, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "GenerativeServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GenerativeServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py new file mode 100644 index 000000000000..8d13ebaceba5 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py @@ -0,0 +1,1252 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Iterable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1.types import content +from google.ai.generativelanguage_v1.types import content as gag_content +from google.ai.generativelanguage_v1.types import generative_service + +from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .transports.grpc import GenerativeServiceGrpcTransport +from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport +from .transports.rest import GenerativeServiceRestTransport + + +class GenerativeServiceClientMeta(type): + """Metaclass for the GenerativeService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GenerativeServiceTransport]] + _transport_registry["grpc"] = GenerativeServiceGrpcTransport + _transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport + _transport_registry["rest"] = GenerativeServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GenerativeServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GenerativeServiceClient(metaclass=GenerativeServiceClientMeta): + """API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GenerativeServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GenerativeServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GenerativeServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the generative service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GenerativeServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GenerativeServiceTransport): + # transport is a GenerativeServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Generates a response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = client.generate_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.GenerateContentRequest, dict]): + The request object. Request to generate a completion from + the model. + model (str): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. 
They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.GenerateContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def stream_generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[generative_service.GenerateContentResponse]: + r"""Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = client.stream_generate_content(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.GenerateContentRequest, dict]): + The request object. Request to generate a completion from + the model. + model (str): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): + Required. The content of the current + conversation with the model. 
+ For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.GenerateContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stream_generate_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def embed_content( + self, + request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None, + *, + model: Optional[str] = None, + content: Optional[gag_content.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Generates an embedding from the model given an input + ``Content``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_embed_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = client.embed_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.EmbedContentRequest, dict]): + The request object. Request containing the ``Content`` for the model to + embed. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (google.ai.generativelanguage_v1.types.Content): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.EmbedContentResponse: + The response to an EmbedContentRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.EmbedContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.EmbedContentRequest): + request = generative_service.EmbedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.embed_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_embed_contents( + self, + request: Optional[ + Union[generative_service.BatchEmbedContentsRequest, dict] + ] = None, + *, + model: Optional[str] = None, + requests: Optional[ + MutableSequence[generative_service.EmbedContentRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = client.batch_embed_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.BatchEmbedContentsRequest, dict]): + The request object. Batch request to get embeddings from + the model for a list of prompts. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ai.generativelanguage_v1.types.EmbedContentRequest]): + Required. Embed requests for the batch. The model in + each of these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1.types.BatchEmbedContentsResponse: + The response to a BatchEmbedContentsRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.BatchEmbedContentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.BatchEmbedContentsRequest): + request = generative_service.BatchEmbedContentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_embed_contents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def count_tokens( + self, + request: Optional[Union[generative_service.CountTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.CountTokensResponse: + r"""Runs a model's tokenizer on input content and returns + the token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_count_tokens(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = client.count_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.CountTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): + Required. The input given to the + model as a prompt. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.CountTokensResponse: + A response from CountTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.CountTokensRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.CountTokensRequest): + request = generative_service.CountTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GenerativeServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GenerativeServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/__init__.py new file mode 100644 index 000000000000..1d35da543a1e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GenerativeServiceTransport +from .grpc import GenerativeServiceGrpcTransport +from .grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport +from .rest import GenerativeServiceRestInterceptor, GenerativeServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[GenerativeServiceTransport]] +_transport_registry["grpc"] = GenerativeServiceGrpcTransport +_transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport +_transport_registry["rest"] = GenerativeServiceRestTransport + +__all__ = ( + "GenerativeServiceTransport", + "GenerativeServiceGrpcTransport", + "GenerativeServiceGrpcAsyncIOTransport", + "GenerativeServiceRestTransport", + "GenerativeServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py new file mode 100644 index 000000000000..93ff928d4f57 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/base.py @@ -0,0 +1,299 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1 import gapic_version as package_version +from google.ai.generativelanguage_v1.types import generative_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GenerativeServiceTransport(abc.ABC): + """Abstract transport class for GenerativeService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.generate_content: gapic_v1.method.wrap_method( + self.generate_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.stream_generate_content: gapic_v1.method.wrap_method( + self.stream_generate_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.embed_content: gapic_v1.method.wrap_method( + self.embed_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_embed_contents: gapic_v1.method.wrap_method( + self.batch_embed_contents, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.count_tokens: gapic_v1.method.wrap_method( + self.count_tokens, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Union[ + generative_service.GenerateContentResponse, + Awaitable[generative_service.GenerateContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Union[ + generative_service.GenerateContentResponse, + Awaitable[generative_service.GenerateContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + Union[ + generative_service.EmbedContentResponse, + Awaitable[generative_service.EmbedContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + Union[ + generative_service.BatchEmbedContentsResponse, + Awaitable[generative_service.BatchEmbedContentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], + Union[ + generative_service.CountTokensResponse, + Awaitable[generative_service.CountTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + +__all__ = ("GenerativeServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py new file mode 100644 index 000000000000..61a0782d47fa --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc.py @@ -0,0 +1,443 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport + + +class GenerativeServiceGrpcTransport(GenerativeServiceTransport): + """gRPC backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + r"""Return a callable for the generate content method over gRPC. + + Generates a response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + ~.GenerateContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_content" not in self._stubs: + self._stubs["generate_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/GenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["generate_content"] + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + r"""Return a callable for the stream generate content method over gRPC. + + Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + ~.GenerateContentResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_generate_content" not in self._stubs: + self._stubs["stream_generate_content"] = self.grpc_channel.unary_stream( + "/google.ai.generativelanguage.v1.GenerativeService/StreamGenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["stream_generate_content"] + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + generative_service.EmbedContentResponse, + ]: + r"""Return a callable for the embed content method over gRPC. + + Generates an embedding from the model given an input + ``Content``. + + Returns: + Callable[[~.EmbedContentRequest], + ~.EmbedContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_content" not in self._stubs: + self._stubs["embed_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/EmbedContent", + request_serializer=generative_service.EmbedContentRequest.serialize, + response_deserializer=generative_service.EmbedContentResponse.deserialize, + ) + return self._stubs["embed_content"] + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + generative_service.BatchEmbedContentsResponse, + ]: + r"""Return a callable for the batch embed contents method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. 
+ + Returns: + Callable[[~.BatchEmbedContentsRequest], + ~.BatchEmbedContentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_contents" not in self._stubs: + self._stubs["batch_embed_contents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/BatchEmbedContents", + request_serializer=generative_service.BatchEmbedContentsRequest.serialize, + response_deserializer=generative_service.BatchEmbedContentsResponse.deserialize, + ) + return self._stubs["batch_embed_contents"] + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], generative_service.CountTokensResponse + ]: + r"""Return a callable for the count tokens method over gRPC. + + Runs a model's tokenizer on input content and returns + the token count. + + Returns: + Callable[[~.CountTokensRequest], + ~.CountTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_tokens" not in self._stubs: + self._stubs["count_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/CountTokens", + request_serializer=generative_service.CountTokensRequest.serialize, + response_deserializer=generative_service.CountTokensResponse.deserialize, + ) + return self._stubs["count_tokens"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GenerativeServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4804c88a41a1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/grpc_asyncio.py @@ -0,0 +1,443 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .grpc import GenerativeServiceGrpcTransport + + +class GenerativeServiceGrpcAsyncIOTransport(GenerativeServiceTransport): + """gRPC AsyncIO backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Awaitable[generative_service.GenerateContentResponse], + ]: + r"""Return a callable for the generate content method over gRPC. + + Generates a response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + Awaitable[~.GenerateContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_content" not in self._stubs: + self._stubs["generate_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/GenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["generate_content"] + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Awaitable[generative_service.GenerateContentResponse], + ]: + r"""Return a callable for the stream generate content method over gRPC. + + Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + Awaitable[~.GenerateContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "stream_generate_content" not in self._stubs: + self._stubs["stream_generate_content"] = self.grpc_channel.unary_stream( + "/google.ai.generativelanguage.v1.GenerativeService/StreamGenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["stream_generate_content"] + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + Awaitable[generative_service.EmbedContentResponse], + ]: + r"""Return a callable for the embed content method over gRPC. + + Generates an embedding from the model given an input + ``Content``. + + Returns: + Callable[[~.EmbedContentRequest], + Awaitable[~.EmbedContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_content" not in self._stubs: + self._stubs["embed_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/EmbedContent", + request_serializer=generative_service.EmbedContentRequest.serialize, + response_deserializer=generative_service.EmbedContentResponse.deserialize, + ) + return self._stubs["embed_content"] + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + Awaitable[generative_service.BatchEmbedContentsResponse], + ]: + r"""Return a callable for the batch embed contents method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. + + Returns: + Callable[[~.BatchEmbedContentsRequest], + Awaitable[~.BatchEmbedContentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_contents" not in self._stubs: + self._stubs["batch_embed_contents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/BatchEmbedContents", + request_serializer=generative_service.BatchEmbedContentsRequest.serialize, + response_deserializer=generative_service.BatchEmbedContentsResponse.deserialize, + ) + return self._stubs["batch_embed_contents"] + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], + Awaitable[generative_service.CountTokensResponse], + ]: + r"""Return a callable for the count tokens method over gRPC. + + Runs a model's tokenizer on input content and returns + the token count. + + Returns: + Callable[[~.CountTokensRequest], + Awaitable[~.CountTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_tokens" not in self._stubs: + self._stubs["count_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.GenerativeService/CountTokens", + request_serializer=generative_service.CountTokensRequest.serialize, + response_deserializer=generative_service.CountTokensResponse.deserialize, + ) + return self._stubs["count_tokens"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("GenerativeServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py new file mode 100644 index 000000000000..1630e5d48918 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -0,0 +1,1179 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1.types import generative_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import GenerativeServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GenerativeServiceRestInterceptor: + """Interceptor for GenerativeService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GenerativeServiceRestTransport. + + .. 
code-block:: python + class MyCustomGenerativeServiceInterceptor(GenerativeServiceRestInterceptor): + def pre_batch_embed_contents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_embed_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_count_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_embed_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_embed_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stream_generate_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stream_generate_content(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GenerativeServiceRestTransport(interceptor=MyCustomGenerativeServiceInterceptor()) + client = GenerativeServiceClient(transport=transport) + + + """ + + def pre_batch_embed_contents( + self, + request: generative_service.BatchEmbedContentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.BatchEmbedContentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_embed_contents + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. 
+ """ + return request, metadata + + def post_batch_embed_contents( + self, response: generative_service.BatchEmbedContentsResponse + ) -> generative_service.BatchEmbedContentsResponse: + """Post-rpc interceptor for batch_embed_contents + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_count_tokens( + self, + request: generative_service.CountTokensRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.CountTokensRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for count_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_count_tokens( + self, response: generative_service.CountTokensResponse + ) -> generative_service.CountTokensResponse: + """Post-rpc interceptor for count_tokens + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_embed_content( + self, + request: generative_service.EmbedContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.EmbedContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for embed_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_embed_content( + self, response: generative_service.EmbedContentResponse + ) -> generative_service.EmbedContentResponse: + """Post-rpc interceptor for embed_content + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. 
+ """ + return response + + def pre_generate_content( + self, + request: generative_service.GenerateContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.GenerateContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_generate_content( + self, response: generative_service.GenerateContentResponse + ) -> generative_service.GenerateContentResponse: + """Post-rpc interceptor for generate_content + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_stream_generate_content( + self, + request: generative_service.GenerateContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.GenerateContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stream_generate_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_stream_generate_content( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for stream_generate_content + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. 
+ """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class GenerativeServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GenerativeServiceRestInterceptor + + +class GenerativeServiceRestTransport(GenerativeServiceTransport): + """REST backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GenerativeServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GenerativeServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchEmbedContents(GenerativeServiceRestStub): + def __hash__(self): + return hash("BatchEmbedContents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.BatchEmbedContentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Call the batch embed contents method over HTTP. + + Args: + request (~.generative_service.BatchEmbedContentsRequest): + The request object. Batch request to get embeddings from + the model for a list of prompts. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.BatchEmbedContentsResponse: + The response to a ``BatchEmbedContentsRequest``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{model=models/*}:batchEmbedContents", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_embed_contents( + request, metadata + ) + pb_request = generative_service.BatchEmbedContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.BatchEmbedContentsResponse() + pb_resp = generative_service.BatchEmbedContentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_embed_contents(resp) + return resp + + class _CountTokens(GenerativeServiceRestStub): + def __hash__(self): + return hash("CountTokens") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.CountTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.CountTokensResponse: + r"""Call the count tokens method over HTTP. + + Args: + request (~.generative_service.CountTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.CountTokensResponse: + A response from ``CountTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{model=models/*}:countTokens", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_count_tokens(request, metadata) + pb_request = generative_service.CountTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.CountTokensResponse() + pb_resp = generative_service.CountTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_count_tokens(resp) + return resp + + class _EmbedContent(GenerativeServiceRestStub): + def __hash__(self): + return hash("EmbedContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.EmbedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Call the embed content method over HTTP. + + Args: + request (~.generative_service.EmbedContentRequest): + The request object. Request containing the ``Content`` for the model to + embed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.EmbedContentResponse: + The response to an ``EmbedContentRequest``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{model=models/*}:embedContent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_embed_content(request, metadata) + pb_request = generative_service.EmbedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.EmbedContentResponse() + pb_resp = generative_service.EmbedContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_embed_content(resp) + return resp + + class _GenerateContent(GenerativeServiceRestStub): + def __hash__(self): + return hash("GenerateContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.GenerateContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Call the generate content method over HTTP. + + Args: + request (~.generative_service.GenerateContentRequest): + The request object. Request to generate a completion from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They are + reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each + candidate in ``finish_reason`` and in + ``safety_ratings``. 
The API contract is that: + + - either all requested candidates are returned or no + candidates at all + - no candidates are returned only if there was + something wrong with the prompt (see + ``prompt_feedback``) + - feedback on each candidate is reported on + ``finish_reason`` and ``safety_ratings``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{model=models/*}:generateContent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_content( + request, metadata + ) + pb_request = generative_service.GenerateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.GenerateContentResponse() + pb_resp = generative_service.GenerateContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_content(resp) + return resp + + class _StreamGenerateContent(GenerativeServiceRestStub): + def __hash__(self): + return hash("StreamGenerateContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.GenerateContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the stream generate content method over HTTP. + + Args: + request (~.generative_service.GenerateContentRequest): + The request object. Request to generate a completion from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They are + reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each + candidate in ``finish_reason`` and in + ``safety_ratings``. 
The API contract is that: + + - either all requested candidates are returned or no + candidates at all + - no candidates are returned only if there was + something wrong with the prompt (see + ``prompt_feedback``) + - feedback on each candidate is reported on + ``finish_reason`` and ``safety_ratings``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{model=models/*}:streamGenerateContent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stream_generate_content( + request, metadata + ) + pb_request = generative_service.GenerateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator( + response, generative_service.GenerateContentResponse + ) + resp = self._interceptor.post_stream_generate_content(resp) + return resp + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + generative_service.BatchEmbedContentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchEmbedContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], generative_service.CountTokensResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + generative_service.EmbedContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EmbedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamGenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(GenerativeServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=tunedModels/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(GenerativeServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=tunedModels/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(GenerativeServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=operations}", + }, + { + "method": "get", + "uri": "/v1/{name=tunedModels/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GenerativeServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/__init__.py new file mode 100644 index 000000000000..5738b8bf4239 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ModelServiceAsyncClient +from .client import ModelServiceClient + +__all__ = ( + "ModelServiceClient", + "ModelServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py new file mode 100644 index 000000000000..78a5e79f603b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/async_client.py @@ -0,0 +1,625 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1.services.model_service import pagers +from google.ai.generativelanguage_v1.types import model, model_service + +from .client import ModelServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport + + +class ModelServiceAsyncClient: + """Provides methods for getting metadata information about + Generative Models. 
+ """ + + _client: ModelServiceClient + + DEFAULT_ENDPOINT = ModelServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ModelServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(ModelServiceClient.model_path) + parse_model_path = staticmethod(ModelServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + ModelServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ModelServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ModelServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(ModelServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(ModelServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + ModelServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ModelServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ModelServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ModelServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ModelServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_info.__func__(ModelServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_file.__func__(ModelServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return ModelServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ModelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ModelServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ModelServiceClient).get_transport_class, type(ModelServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ModelServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ModelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ModelServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Gets information about a specific Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_get_model(): + # Create a client + client = generativelanguage_v1.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.GetModelRequest, dict]]): + The request object. Request for getting information about + a specific Model. 
+ name (:class:`str`): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.GetModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_model, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsAsyncPager: + r"""Lists models available through the API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + async def sample_list_models(): + # Create a client + client = generativelanguage_v1.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1.types.ListModelsRequest, dict]]): + The request object. Request for listing all Models. + page_size (:class:`int`): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at + most 50 models will be returned per page. This method + returns at most 1000 models per page, even if you pass a + larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (:class:`str`): + A page token, received from a previous ``ListModels`` + call. 
+ + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.services.model_service.pagers.ListModelsAsyncPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.ListModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_models, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListModelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ModelServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py new file mode 100644 index 000000000000..e806af2e2aec --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py @@ -0,0 +1,856 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1.services.model_service import pagers +from google.ai.generativelanguage_v1.types import model, model_service + +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc import ModelServiceGrpcTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .transports.rest import ModelServiceRestTransport + + +class ModelServiceClientMeta(type): + """Metaclass for the ModelService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] + _transport_registry["grpc"] = ModelServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ModelServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ModelServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ModelServiceClient(metaclass=ModelServiceClientMeta): + """Provides methods for getting metadata information about + Generative Models. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ModelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ModelServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ModelServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ModelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ModelServiceTransport): + # transport is a ModelServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Gets information about a specific Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_get_model(): + # Create a client + client = generativelanguage_v1.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.GetModelRequest, dict]): + The request object. 
Request for getting information about + a specific Model. + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.GetModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.GetModelRequest): + request = model_service.GetModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsPager: + r"""Lists models available through the API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1 + + def sample_list_models(): + # Create a client + client = generativelanguage_v1.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1.types.ListModelsRequest, dict]): + The request object. Request for listing all Models. + page_size (int): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at + most 50 models will be returned per page. This method + returns at most 1000 models per page, even if you pass a + larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ page_token (str): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1.services.model_service.pagers.ListModelsPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.ListModelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.ListModelsRequest): + request = model_service.ListModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_models] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListModelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ModelServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/pagers.py new file mode 100644 index 000000000000..036124acfb49 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.ai.generativelanguage_v1.types import model, model_service + + +class ListModelsPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1.types.ListModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``models`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., model_service.ListModelsResponse], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1.types.ListModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[model.Model]: + for page in self.pages: + yield from page.models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListModelsAsyncPager: + """A pager for iterating through ``list_models`` requests. 
+ + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1.types.ListModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[model_service.ListModelsResponse]], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1.types.ListModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[model.Model]: + async def async_generator(): + async for page in self.pages: + for response in page.models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/__init__.py new file mode 100644 index 000000000000..1b430a25489e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ModelServiceTransport +from .grpc import ModelServiceGrpcTransport +from .grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .rest import ModelServiceRestInterceptor, ModelServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] +_transport_registry["grpc"] = ModelServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ModelServiceRestTransport + +__all__ = ( + "ModelServiceTransport", + "ModelServiceGrpcTransport", + "ModelServiceGrpcAsyncIOTransport", + "ModelServiceRestTransport", + "ModelServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/base.py new file mode 100644 index 000000000000..7c5be644584f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/base.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1 import gapic_version as package_version +from google.ai.generativelanguage_v1.types import model, model_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ModelServiceTransport(abc.ABC): + """Abstract transport class for ModelService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get_model: gapic_v1.method.wrap_method( + self.get_model, + default_timeout=None, + client_info=client_info, + ), + self.list_models: gapic_v1.method.wrap_method( + self.list_models, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_model( + self, + ) -> Callable[ + [model_service.GetModelRequest], Union[model.Model, Awaitable[model.Model]] + ]: + raise NotImplementedError() + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], + Union[ + model_service.ListModelsResponse, + Awaitable[model_service.ListModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ModelServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py new file mode 100644 index 000000000000..cbd253b4ec85 --- /dev/null +++ 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc.py @@ -0,0 +1,344 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1.types import model, model_service + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport + + +class ModelServiceGrpcTransport(ModelServiceTransport): + """gRPC backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific Model. + + Returns: + Callable[[~.GetModelRequest], + ~.Model]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + r"""Return a callable for the list models method over gRPC. + + Lists models available through the API. + + Returns: + Callable[[~.ListModelsRequest], + ~.ListModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ModelServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..47ff53a30928 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/grpc_asyncio.py @@ -0,0 +1,347 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1.types import model, model_service + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .grpc import ModelServiceGrpcTransport + + +class ModelServiceGrpcAsyncIOTransport(ModelServiceTransport): + """gRPC AsyncIO backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_model( + self, + ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific Model. 
+ + Returns: + Callable[[~.GetModelRequest], + Awaitable[~.Model]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], Awaitable[model_service.ListModelsResponse] + ]: + r"""Return a callable for the list models method over gRPC. + + Lists models available through the API. + + Returns: + Callable[[~.ListModelsRequest], + Awaitable[~.ListModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ModelServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py new file mode 100644 index 000000000000..fd9c2c508cd2 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py @@ -0,0 +1,688 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1.types import model, model_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ModelServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ModelServiceRestInterceptor: + """Interceptor for ModelService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ModelServiceRestTransport. + + .. 
code-block:: python + class MyCustomModelServiceInterceptor(ModelServiceRestInterceptor): + def pre_get_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_models(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ModelServiceRestTransport(interceptor=MyCustomModelServiceInterceptor()) + client = ModelServiceClient(transport=transport) + + + """ + + def pre_get_model( + self, + request: model_service.GetModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.GetModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_model(self, response: model.Model) -> model.Model: + """Post-rpc interceptor for get_model + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_list_models( + self, + request: model_service.ListModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.ListModelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. 
+ """ + return request, metadata + + def post_list_models( + self, response: model_service.ListModelsResponse + ) -> model_service.ListModelsResponse: + """Post-rpc interceptor for list_models + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ModelServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ModelServiceRestInterceptor + + +class ModelServiceRestTransport(ModelServiceTransport): + """REST backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ModelServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ModelServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetModel(ModelServiceRestStub): + def __hash__(self): + return hash("GetModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.GetModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Call the get model method over HTTP. + + Args: + request (~.model_service.GetModelRequest): + The request object. Request for getting information about + a specific Model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model.Model: + Information about a Generative + Language Model. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=models/*}", + }, + ] + request, metadata = self._interceptor.pre_get_model(request, metadata) + pb_request = model_service.GetModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model.Model() + pb_resp = model.Model.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_model(resp) + return resp + + class _ListModels(ModelServiceRestStub): + def __hash__(self): + return hash("ListModels") + + def __call__( + self, + request: model_service.ListModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model_service.ListModelsResponse: + r"""Call the list models method over HTTP. + + Args: + request (~.model_service.ListModelsRequest): + The request object. Request for listing all Models. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model_service.ListModelsResponse: + Response from ``ListModel`` containing a paginated list + of Models. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/models", + }, + ] + request, metadata = self._interceptor.pre_list_models(request, metadata) + pb_request = model_service.ListModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model_service.ListModelsResponse() + pb_resp = model_service.ListModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_models(resp) + return resp + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ModelServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=tunedModels/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ModelServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=tunedModels/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ModelServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=operations}", + }, + { + "method": "get", + "uri": "/v1/{name=tunedModels/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ModelServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py new file mode 100644 index 000000000000..88334b0f1dc4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .citation import CitationMetadata, CitationSource +from .content import Blob, Content, Part +from .generative_service import ( + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + EmbedContentResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + TaskType, +) +from .model import Model +from .model_service import GetModelRequest, ListModelsRequest, ListModelsResponse +from .safety import HarmCategory, SafetyRating, SafetySetting + +__all__ = ( + "CitationMetadata", + "CitationSource", + "Blob", + "Content", + "Part", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "Candidate", + "ContentEmbedding", + "CountTokensRequest", + "CountTokensResponse", + "EmbedContentRequest", + "EmbedContentResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerationConfig", + "TaskType", + "Model", + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "SafetyRating", + "SafetySetting", + "HarmCategory", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/citation.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/citation.py new file mode 100644 index 000000000000..b3c3a6ff6681 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/citation.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1", + manifest={ + "CitationMetadata", + "CitationSource", + }, +) + + +class CitationMetadata(proto.Message): + r"""A collection of source attributions for a piece of content. + + Attributes: + citation_sources (MutableSequence[google.ai.generativelanguage_v1.types.CitationSource]): + Citations to sources for a specific response. + """ + + citation_sources: MutableSequence["CitationSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CitationSource", + ) + + +class CitationSource(proto.Message): + r"""A citation to a source for a portion of a specific response. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_index (int): + Optional. Start of segment of the response + that is attributed to this source. + + Index indicates the start of the segment, + measured in bytes. + + This field is a member of `oneof`_ ``_start_index``. + end_index (int): + Optional. End of the attributed segment, + exclusive. + + This field is a member of `oneof`_ ``_end_index``. + uri (str): + Optional. URI that is attributed as a source + for a portion of the text. + + This field is a member of `oneof`_ ``_uri``. + license_ (str): + Optional. License for the GitHub project that + is attributed as a source for segment. + + License info is required for code citations. + + This field is a member of `oneof`_ ``_license``. 
+ """ + + start_index: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + end_index: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + license_: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py new file mode 100644 index 000000000000..067597740b04 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/content.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1", + manifest={ + "Content", + "Part", + "Blob", + }, +) + + +class Content(proto.Message): + r"""The base structured datatype containing multi-part content of a + message. + + A ``Content`` includes a ``role`` field designating the producer of + the ``Content`` and a ``parts`` field containing multi-part data + that contains the content of the message turn. 
+ + Attributes: + parts (MutableSequence[google.ai.generativelanguage_v1.types.Part]): + Ordered ``Parts`` that constitute a single message. Parts + may have different MIME types. + role (str): + Optional. The producer of the content. Must + be either 'user' or 'model'. + Useful to set for multi-turn conversations, + otherwise can be left blank or unset. + """ + + parts: MutableSequence["Part"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Part", + ) + role: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Part(proto.Message): + r"""A datatype containing media that is part of a multi-part ``Content`` + message. + + A ``Part`` consists of data which has an associated datatype. A + ``Part`` can only contain one of the accepted types in + ``Part.data``. + + A ``Part`` must have a fixed IANA MIME type identifying the type and + subtype of the media if the ``inline_data`` field is filled with raw + bytes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + Inline text. + + This field is a member of `oneof`_ ``data``. + inline_data (google.ai.generativelanguage_v1.types.Blob): + Inline media bytes. + + This field is a member of `oneof`_ ``data``. + """ + + text: str = proto.Field( + proto.STRING, + number=2, + oneof="data", + ) + inline_data: "Blob" = proto.Field( + proto.MESSAGE, + number=3, + oneof="data", + message="Blob", + ) + + +class Blob(proto.Message): + r"""Raw media bytes. + + Text should not be sent as raw bytes, use the 'text' field. + + Attributes: + mime_type (str): + The IANA standard MIME type of the source + data. Accepted types include: "image/png", + "image/jpeg", "image/heic", "image/heif", + "image/webp". 
+ data (bytes): + Raw bytes for media formats. + """ + + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py new file mode 100644 index 000000000000..d1759ce7490f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -0,0 +1,600 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1.types import citation +from google.ai.generativelanguage_v1.types import content as gag_content +from google.ai.generativelanguage_v1.types import safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1", + manifest={ + "TaskType", + "GenerateContentRequest", + "GenerationConfig", + "GenerateContentResponse", + "Candidate", + "EmbedContentRequest", + "ContentEmbedding", + "EmbedContentResponse", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "CountTokensRequest", + "CountTokensResponse", + }, +) + + +class TaskType(proto.Enum): + r"""Type of task for which the embedding will be used. + + Values: + TASK_TYPE_UNSPECIFIED (0): + Unset value, which will default to one of the + other enum values. + RETRIEVAL_QUERY (1): + Specifies the given text is a query in a + search/retrieval setting. + RETRIEVAL_DOCUMENT (2): + Specifies the given text is a document from + the corpus being searched. + SEMANTIC_SIMILARITY (3): + Specifies the given text will be used for + STS. + CLASSIFICATION (4): + Specifies that the given text will be + classified. + CLUSTERING (5): + Specifies that the embeddings will be used + for clustering. + """ + TASK_TYPE_UNSPECIFIED = 0 + RETRIEVAL_QUERY = 1 + RETRIEVAL_DOCUMENT = 2 + SEMANTIC_SIMILARITY = 3 + CLASSIFICATION = 4 + CLUSTERING = 5 + + +class GenerateContentRequest(proto.Message): + r"""Request to generate a completion from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the ``Model`` to use for generating + the completion. + + Format: ``name=models/{model}``. + contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): + Required. The content of the current + conversation with the model. 
+ For single-turn queries, this is a single + instance. For multi-turn queries, this is a + repeated field that contains conversation + history + latest request. + safety_settings (MutableSequence[google.ai.generativelanguage_v1.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances for + blocking unsafe content. + + This will be enforced on the + ``GenerateContentRequest.contents`` and + ``GenerateContentResponse.candidates``. There should not be + more than one setting for each ``SafetyCategory`` type. The + API will block any contents and responses that fail to meet + the thresholds set by these settings. This list overrides + the default settings for each ``SafetyCategory`` specified + in the safety_settings. If there is no ``SafetySetting`` for + a given ``SafetyCategory`` provided in the list, the API + will use the default safety setting for that category. Harm + categories HARM_CATEGORY_HATE_SPEECH, + HARM_CATEGORY_SEXUALLY_EXPLICIT, + HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT + are supported. + generation_config (google.ai.generativelanguage_v1.types.GenerationConfig): + Optional. Configuration options for model + generation and outputs. + + This field is a member of `oneof`_ ``_generation_config``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.SafetySetting, + ) + generation_config: "GenerationConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="GenerationConfig", + ) + + +class GenerationConfig(proto.Message): + r"""Configuration options for model generation and outputs. Not + all parameters may be configurable for every model. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + candidate_count (int): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, this + will default to 1. + + This field is a member of `oneof`_ ``_candidate_count``. + stop_sequences (MutableSequence[str]): + Optional. The set of character sequences (up + to 5) that will stop output generation. If + specified, the API will stop at the first + appearance of a stop sequence. The stop sequence + will not be included as part of the response. + max_output_tokens (int): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit specified + in the ``Model`` specification. + + This field is a member of `oneof`_ ``_max_output_tokens``. + temperature (float): + Optional. Controls the randomness of the output. Note: The + default value varies by model, see the ``Model.temperature`` + attribute of the ``Model`` returned the ``getModel`` + function. + + Values can range from [0.0,1.0], inclusive. A value closer + to 1.0 will produce responses that are more varied and + creative, while a value closer to 0.0 will typically result + in more straightforward responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities so + that only the most likely tokens are considered. Top-k + sampling directly limits the maximum number of tokens to + consider, while Nucleus sampling limits number of tokens + based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. 
+ + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_k``. + """ + + candidate_count: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + stop_sequences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + max_output_tokens: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + + +class GenerateContentResponse(proto.Message): + r"""Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for + each candidate in ``finish_reason`` and in ``safety_ratings``. The + API contract is that: + + - either all requested candidates are returned or no candidates at + all + - no candidates are returned only if there was something wrong with + the prompt (see ``prompt_feedback``) + - feedback on each candidate is reported on ``finish_reason`` and + ``safety_ratings``. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1.types.Candidate]): + Candidate responses from the model. + prompt_feedback (google.ai.generativelanguage_v1.types.GenerateContentResponse.PromptFeedback): + Returns the prompt's feedback related to the + content filters. 
+ """ + + class PromptFeedback(proto.Message): + r"""A set of the feedback metadata the prompt specified in + ``GenerateContentRequest.content``. + + Attributes: + block_reason (google.ai.generativelanguage_v1.types.GenerateContentResponse.PromptFeedback.BlockReason): + Optional. If set, the prompt was blocked and + no candidates are returned. Rephrase your + prompt. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1.types.SafetyRating]): + Ratings for safety of the prompt. + There is at most one rating per category. + """ + + class BlockReason(proto.Enum): + r"""Specifies what was the reason why prompt was blocked. + + Values: + BLOCK_REASON_UNSPECIFIED (0): + Default value. This value is unused. + SAFETY (1): + Prompt was blocked due to safety reasons. You can inspect + ``safety_ratings`` to understand which safety category + blocked it. + OTHER (2): + Prompt was blocked due to unknown reaasons. + """ + BLOCK_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( + proto.Field( + proto.ENUM, + number=1, + enum="GenerateContentResponse.PromptFeedback.BlockReason", + ) + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + + candidates: MutableSequence["Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Candidate", + ) + prompt_feedback: PromptFeedback = proto.Field( + proto.MESSAGE, + number=2, + message=PromptFeedback, + ) + + +class Candidate(proto.Message): + r"""A response candidate generated from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + index (int): + Output only. Index of the candidate in the + list of candidates. + + This field is a member of `oneof`_ ``_index``. + content (google.ai.generativelanguage_v1.types.Content): + Output only. 
Generated content returned from + the model. + finish_reason (google.ai.generativelanguage_v1.types.Candidate.FinishReason): + Optional. Output only. The reason why the + model stopped generating tokens. + If empty, the model has not stopped generating + the tokens. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1.types.SafetyRating]): + List of ratings for the safety of a response + candidate. + There is at most one rating per category. + citation_metadata (google.ai.generativelanguage_v1.types.CitationMetadata): + Output only. Citation information for model-generated + candidate. + + This field may be populated with recitation information for + any text included in the ``content``. These are passages + that are "recited" from copyrighted material in the + foundational LLM's training data. + token_count (int): + Output only. Token count for this candidate. + """ + + class FinishReason(proto.Enum): + r"""Defines the reason why the model stopped generating tokens. + + Values: + FINISH_REASON_UNSPECIFIED (0): + Default value. This value is unused. + STOP (1): + Natural stop point of the model or provided + stop sequence. + MAX_TOKENS (2): + The maximum number of tokens as specified in + the request was reached. + SAFETY (3): + The candidate content was flagged for safety + reasons. + RECITATION (4): + The candidate content was flagged for + recitation reasons. + OTHER (5): + Unknown reason. 
+ """ + FINISH_REASON_UNSPECIFIED = 0 + STOP = 1 + MAX_TOKENS = 2 + SAFETY = 3 + RECITATION = 4 + OTHER = 5 + + index: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=1, + message=gag_content.Content, + ) + finish_reason: FinishReason = proto.Field( + proto.ENUM, + number=2, + enum=FinishReason, + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=safety.SafetyRating, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=6, + message=citation.CitationMetadata, + ) + token_count: int = proto.Field( + proto.INT32, + number=7, + ) + + +class EmbedContentRequest(proto.Message): + r"""Request containing the ``Content`` for the model to embed. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + content (google.ai.generativelanguage_v1.types.Content): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + task_type (google.ai.generativelanguage_v1.types.TaskType): + Optional. Optional task type for which the embeddings will + be used. Can only be set for ``models/embedding-001``. + + This field is a member of `oneof`_ ``_task_type``. + title (str): + Optional. An optional title for the text. Only applicable + when TaskType is ``RETRIEVAL_DOCUMENT``. + + Note: Specifying a ``title`` for ``RETRIEVAL_DOCUMENT`` + provides better quality embeddings for retrieval. + + This field is a member of `oneof`_ ``_title``. 
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + task_type: "TaskType" = proto.Field( + proto.ENUM, + number=3, + optional=True, + enum="TaskType", + ) + title: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class ContentEmbedding(proto.Message): + r"""A list of floats representing an embedding. + + Attributes: + values (MutableSequence[float]): + The embedding values. + """ + + values: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=1, + ) + + +class EmbedContentResponse(proto.Message): + r"""The response to an ``EmbedContentRequest``. + + Attributes: + embedding (google.ai.generativelanguage_v1.types.ContentEmbedding): + Output only. The embedding generated from the + input content. + """ + + embedding: "ContentEmbedding" = proto.Field( + proto.MESSAGE, + number=1, + message="ContentEmbedding", + ) + + +class BatchEmbedContentsRequest(proto.Message): + r"""Batch request to get embeddings from the model for a list of + prompts. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + requests (MutableSequence[google.ai.generativelanguage_v1.types.EmbedContentRequest]): + Required. Embed requests for the batch. The model in each of + these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["EmbedContentRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="EmbedContentRequest", + ) + + +class BatchEmbedContentsResponse(proto.Message): + r"""The response to a ``BatchEmbedContentsRequest``. 
+ + Attributes: + embeddings (MutableSequence[google.ai.generativelanguage_v1.types.ContentEmbedding]): + Output only. The embeddings for each request, + in the same order as provided in the batch + request. + """ + + embeddings: MutableSequence["ContentEmbedding"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ContentEmbedding", + ) + + +class CountTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): + Required. The input given to the model as a + prompt. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + + +class CountTokensResponse(proto.Message): + r"""A response from ``CountTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + total_tokens (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. 
+ """ + + total_tokens: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py new file mode 100644 index 000000000000..f4d6dad89d1c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1", + manifest={ + "Model", + }, +) + + +class Model(proto.Message): + r"""Information about a Generative Language Model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the ``Model``. + + Format: ``models/{model}`` with a ``{model}`` naming + convention of: + + - "{base_model_id}-{version}" + + Examples: + + - ``models/chat-bison-001`` + base_model_id (str): + Required. The name of the base model, pass this to the + generation request. + + Examples: + + - ``chat-bison`` + version (str): + Required. The version number of the model. 
+ + This represents the major version + display_name (str): + The human-readable name of the model. E.g. + "Chat Bison". + The name can be up to 128 characters long and + can consist of any UTF-8 characters. + description (str): + A short description of the model. + input_token_limit (int): + Maximum number of input tokens allowed for + this model. + output_token_limit (int): + Maximum number of output tokens available for + this model. + supported_generation_methods (MutableSequence[str]): + The model's supported generation methods. + + The method names are defined as Pascal case strings, such as + ``generateMessage`` which correspond to API methods. + temperature (float): + Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. This + value specifies default to be used by the backend while + making the call to the model. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + For Nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. This value specifies + default to be used by the backend while making the call to + the model. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + For Top-k sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. This value specifies default to be used by the + backend while making the call to the model. + + This field is a member of `oneof`_ ``_top_k``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + base_model_id: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + input_token_limit: int = proto.Field( + proto.INT32, + number=6, + ) + output_token_limit: int = proto.Field( + proto.INT32, + number=7, + ) + supported_generation_methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=9, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=10, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=11, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py new file mode 100644 index 000000000000..7494a8d2a179 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/model_service.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1.types import model + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1", + manifest={ + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + }, +) + + +class GetModelRequest(proto.Message): + r"""Request for getting information about a specific Model. + + Attributes: + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListModelsRequest(proto.Message): + r"""Request for listing all Models. + + Attributes: + page_size (int): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at most + 50 models will be returned per page. This method returns at + most 1000 models per page, even if you pass a larger + page_size. + page_token (str): + A page token, received from a previous ``ListModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the page + token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListModelsResponse(proto.Message): + r"""Response from ``ListModel`` containing a paginated list of Models. + + Attributes: + models (MutableSequence[google.ai.generativelanguage_v1.types.Model]): + The returned Models. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. 
+ """ + + @property + def raw_page(self): + return self + + models: MutableSequence[model.Model] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=model.Model, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py new file mode 100644 index 000000000000..2548c0f301cb --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1", + manifest={ + "HarmCategory", + "SafetyRating", + "SafetySetting", + }, +) + + +class HarmCategory(proto.Enum): + r"""The category of a rating. + + These categories cover various kinds of harms that developers + may wish to adjust. + + Values: + HARM_CATEGORY_UNSPECIFIED (0): + Category is unspecified. + HARM_CATEGORY_DEROGATORY (1): + Negative or harmful comments targeting + identity and/or protected attribute. + HARM_CATEGORY_TOXICITY (2): + Content that is rude, disrepspectful, or + profane. 
+ HARM_CATEGORY_VIOLENCE (3): + Describes scenarios depictng violence against + an individual or group, or general descriptions + of gore. + HARM_CATEGORY_SEXUAL (4): + Contains references to sexual acts or other + lewd content. + HARM_CATEGORY_MEDICAL (5): + Promotes unchecked medical advice. + HARM_CATEGORY_DANGEROUS (6): + Dangerous content that promotes, facilitates, + or encourages harmful acts. + HARM_CATEGORY_HARASSMENT (7): + Harasment content. + HARM_CATEGORY_HATE_SPEECH (8): + Hate speech and content. + HARM_CATEGORY_SEXUALLY_EXPLICIT (9): + Sexually explicit content. + HARM_CATEGORY_DANGEROUS_CONTENT (10): + Dangerous content. + """ + HARM_CATEGORY_UNSPECIFIED = 0 + HARM_CATEGORY_DEROGATORY = 1 + HARM_CATEGORY_TOXICITY = 2 + HARM_CATEGORY_VIOLENCE = 3 + HARM_CATEGORY_SEXUAL = 4 + HARM_CATEGORY_MEDICAL = 5 + HARM_CATEGORY_DANGEROUS = 6 + HARM_CATEGORY_HARASSMENT = 7 + HARM_CATEGORY_HATE_SPEECH = 8 + HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 + HARM_CATEGORY_DANGEROUS_CONTENT = 10 + + +class SafetyRating(proto.Message): + r"""Safety rating for a piece of content. + + The safety rating contains the category of harm and the harm + probability level in that category for a piece of content. + Content is classified for safety across a number of harm + categories and the probability of the harm classification is + included here. + + Attributes: + category (google.ai.generativelanguage_v1.types.HarmCategory): + Required. The category for this rating. + probability (google.ai.generativelanguage_v1.types.SafetyRating.HarmProbability): + Required. The probability of harm for this + content. + blocked (bool): + Was this content blocked because of this + rating? + """ + + class HarmProbability(proto.Enum): + r"""The probability that a piece of content is harmful. + + The classification system gives the probability of the content + being unsafe. This does not indicate the severity of harm for a + piece of content. 
+ + Values: + HARM_PROBABILITY_UNSPECIFIED (0): + Probability is unspecified. + NEGLIGIBLE (1): + Content has a negligible chance of being + unsafe. + LOW (2): + Content has a low chance of being unsafe. + MEDIUM (3): + Content has a medium chance of being unsafe. + HIGH (4): + Content has a high chance of being unsafe. + """ + HARM_PROBABILITY_UNSPECIFIED = 0 + NEGLIGIBLE = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + probability: HarmProbability = proto.Field( + proto.ENUM, + number=4, + enum=HarmProbability, + ) + blocked: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class SafetySetting(proto.Message): + r"""Safety setting, affecting the safety-blocking behavior. + + Passing a safety setting for a category changes the allowed + proability that content is blocked. + + Attributes: + category (google.ai.generativelanguage_v1.types.HarmCategory): + Required. The category for this setting. + threshold (google.ai.generativelanguage_v1.types.SafetySetting.HarmBlockThreshold): + Required. Controls the probability threshold + at which harm is blocked. + """ + + class HarmBlockThreshold(proto.Enum): + r"""Block at and beyond a specified harm probability. + + Values: + HARM_BLOCK_THRESHOLD_UNSPECIFIED (0): + Threshold is unspecified. + BLOCK_LOW_AND_ABOVE (1): + Content with NEGLIGIBLE will be allowed. + BLOCK_MEDIUM_AND_ABOVE (2): + Content with NEGLIGIBLE and LOW will be + allowed. + BLOCK_ONLY_HIGH (3): + Content with NEGLIGIBLE, LOW, and MEDIUM will + be allowed. + BLOCK_NONE (4): + All content will be allowed. 
+ """ + HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 + BLOCK_LOW_AND_ABOVE = 1 + BLOCK_MEDIUM_AND_ABOVE = 2 + BLOCK_ONLY_HIGH = 3 + BLOCK_NONE = 4 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + threshold: HarmBlockThreshold = proto.Field( + proto.ENUM, + number=4, + enum=HarmBlockThreshold, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py new file mode 100644 index 000000000000..77571d3938df --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.discuss_service import DiscussServiceAsyncClient, DiscussServiceClient +from .services.generative_service import ( + GenerativeServiceAsyncClient, + GenerativeServiceClient, +) +from .services.model_service import ModelServiceAsyncClient, ModelServiceClient +from .services.permission_service import ( + PermissionServiceAsyncClient, + PermissionServiceClient, +) +from .services.retriever_service import ( + RetrieverServiceAsyncClient, + RetrieverServiceClient, +) +from .services.text_service import TextServiceAsyncClient, TextServiceClient +from .types.citation import CitationMetadata, CitationSource +from .types.content import ( + Blob, + Content, + FunctionCall, + FunctionDeclaration, + FunctionResponse, + GroundingPassage, + GroundingPassages, + Part, + Schema, + Tool, + Type, +) +from .types.discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from .types.generative_service import ( + AttributionSourceId, + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + EmbedContentResponse, + GenerateAnswerRequest, + GenerateAnswerResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + GroundingAttribution, + SemanticRetrieverConfig, + TaskType, +) +from .types.model import Model +from .types.model_service import ( + CreateTunedModelMetadata, + CreateTunedModelRequest, + DeleteTunedModelRequest, + GetModelRequest, + GetTunedModelRequest, + ListModelsRequest, + ListModelsResponse, + ListTunedModelsRequest, + ListTunedModelsResponse, + UpdateTunedModelRequest, +) +from .types.permission import Permission +from .types.permission_service import ( + 
CreatePermissionRequest, + DeletePermissionRequest, + GetPermissionRequest, + ListPermissionsRequest, + ListPermissionsResponse, + TransferOwnershipRequest, + TransferOwnershipResponse, + UpdatePermissionRequest, +) +from .types.retriever import ( + Chunk, + ChunkData, + Condition, + Corpus, + CustomMetadata, + Document, + MetadataFilter, + StringList, +) +from .types.retriever_service import ( + BatchCreateChunksRequest, + BatchCreateChunksResponse, + BatchDeleteChunksRequest, + BatchUpdateChunksRequest, + BatchUpdateChunksResponse, + CreateChunkRequest, + CreateCorpusRequest, + CreateDocumentRequest, + DeleteChunkRequest, + DeleteCorpusRequest, + DeleteDocumentRequest, + GetChunkRequest, + GetCorpusRequest, + GetDocumentRequest, + ListChunksRequest, + ListChunksResponse, + ListCorporaRequest, + ListCorporaResponse, + ListDocumentsRequest, + ListDocumentsResponse, + QueryCorpusRequest, + QueryCorpusResponse, + QueryDocumentRequest, + QueryDocumentResponse, + RelevantChunk, + UpdateChunkRequest, + UpdateCorpusRequest, + UpdateDocumentRequest, +) +from .types.safety import ( + ContentFilter, + HarmCategory, + SafetyFeedback, + SafetyRating, + SafetySetting, +) +from .types.text_service import ( + BatchEmbedTextRequest, + BatchEmbedTextResponse, + CountTextTokensRequest, + CountTextTokensResponse, + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) +from .types.tuned_model import ( + Dataset, + Hyperparameters, + TunedModel, + TunedModelSource, + TuningExample, + TuningExamples, + TuningSnapshot, + TuningTask, +) + +__all__ = ( + "DiscussServiceAsyncClient", + "GenerativeServiceAsyncClient", + "ModelServiceAsyncClient", + "PermissionServiceAsyncClient", + "RetrieverServiceAsyncClient", + "TextServiceAsyncClient", + "AttributionSourceId", + "BatchCreateChunksRequest", + "BatchCreateChunksResponse", + "BatchDeleteChunksRequest", + "BatchEmbedContentsRequest", + 
"BatchEmbedContentsResponse", + "BatchEmbedTextRequest", + "BatchEmbedTextResponse", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "Blob", + "Candidate", + "Chunk", + "ChunkData", + "CitationMetadata", + "CitationSource", + "Condition", + "Content", + "ContentEmbedding", + "ContentFilter", + "Corpus", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "CountTextTokensRequest", + "CountTextTokensResponse", + "CountTokensRequest", + "CountTokensResponse", + "CreateChunkRequest", + "CreateCorpusRequest", + "CreateDocumentRequest", + "CreatePermissionRequest", + "CreateTunedModelMetadata", + "CreateTunedModelRequest", + "CustomMetadata", + "Dataset", + "DeleteChunkRequest", + "DeleteCorpusRequest", + "DeleteDocumentRequest", + "DeletePermissionRequest", + "DeleteTunedModelRequest", + "DiscussServiceClient", + "Document", + "EmbedContentRequest", + "EmbedContentResponse", + "EmbedTextRequest", + "EmbedTextResponse", + "Embedding", + "Example", + "FunctionCall", + "FunctionDeclaration", + "FunctionResponse", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerateMessageRequest", + "GenerateMessageResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "GenerationConfig", + "GenerativeServiceClient", + "GetChunkRequest", + "GetCorpusRequest", + "GetDocumentRequest", + "GetModelRequest", + "GetPermissionRequest", + "GetTunedModelRequest", + "GroundingAttribution", + "GroundingPassage", + "GroundingPassages", + "HarmCategory", + "Hyperparameters", + "ListChunksRequest", + "ListChunksResponse", + "ListCorporaRequest", + "ListCorporaResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", + "ListModelsRequest", + "ListModelsResponse", + "ListPermissionsRequest", + "ListPermissionsResponse", + "ListTunedModelsRequest", + "ListTunedModelsResponse", + "Message", + "MessagePrompt", + "MetadataFilter", + "Model", + "ModelServiceClient", + "Part", + "Permission", + 
"PermissionServiceClient", + "QueryCorpusRequest", + "QueryCorpusResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "RelevantChunk", + "RetrieverServiceClient", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "Schema", + "SemanticRetrieverConfig", + "StringList", + "TaskType", + "TextCompletion", + "TextPrompt", + "TextServiceClient", + "Tool", + "TransferOwnershipRequest", + "TransferOwnershipResponse", + "TunedModel", + "TunedModelSource", + "TuningExample", + "TuningExamples", + "TuningSnapshot", + "TuningTask", + "Type", + "UpdateChunkRequest", + "UpdateCorpusRequest", + "UpdateDocumentRequest", + "UpdatePermissionRequest", + "UpdateTunedModelRequest", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..c7fd001bd974 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json @@ -0,0 +1,798 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ai.generativelanguage_v1beta", + "protoPackage": "google.ai.generativelanguage.v1beta", + "schema": "1.0", + "services": { + "DiscussService": { + "clients": { + "grpc": { + "libraryClient": "DiscussServiceClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DiscussServiceAsyncClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + }, + "rest": { + "libraryClient": "DiscussServiceClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + 
"GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + } + } + }, + "GenerativeService": { + "clients": { + "grpc": { + "libraryClient": "GenerativeServiceClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateAnswer": { + "methods": [ + "generate_answer" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GenerativeServiceAsyncClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateAnswer": { + "methods": [ + "generate_answer" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + }, + "rest": { + "libraryClient": "GenerativeServiceClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateAnswer": { + "methods": [ + "generate_answer" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + } + } + }, + "ModelService": { + "clients": { + "grpc": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "CreateTunedModel": { + "methods": [ + "create_tuned_model" + ] + }, + "DeleteTunedModel": { + "methods": [ + "delete_tuned_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "GetTunedModel": { + "methods": [ + "get_tuned_model" + ] + }, + "ListModels": { + 
"methods": [ + "list_models" + ] + }, + "ListTunedModels": { + "methods": [ + "list_tuned_models" + ] + }, + "UpdateTunedModel": { + "methods": [ + "update_tuned_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ModelServiceAsyncClient", + "rpcs": { + "CreateTunedModel": { + "methods": [ + "create_tuned_model" + ] + }, + "DeleteTunedModel": { + "methods": [ + "delete_tuned_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "GetTunedModel": { + "methods": [ + "get_tuned_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "ListTunedModels": { + "methods": [ + "list_tuned_models" + ] + }, + "UpdateTunedModel": { + "methods": [ + "update_tuned_model" + ] + } + } + }, + "rest": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "CreateTunedModel": { + "methods": [ + "create_tuned_model" + ] + }, + "DeleteTunedModel": { + "methods": [ + "delete_tuned_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "GetTunedModel": { + "methods": [ + "get_tuned_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "ListTunedModels": { + "methods": [ + "list_tuned_models" + ] + }, + "UpdateTunedModel": { + "methods": [ + "update_tuned_model" + ] + } + } + } + } + }, + "PermissionService": { + "clients": { + "grpc": { + "libraryClient": "PermissionServiceClient", + "rpcs": { + "CreatePermission": { + "methods": [ + "create_permission" + ] + }, + "DeletePermission": { + "methods": [ + "delete_permission" + ] + }, + "GetPermission": { + "methods": [ + "get_permission" + ] + }, + "ListPermissions": { + "methods": [ + "list_permissions" + ] + }, + "TransferOwnership": { + "methods": [ + "transfer_ownership" + ] + }, + "UpdatePermission": { + "methods": [ + "update_permission" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PermissionServiceAsyncClient", + "rpcs": { + "CreatePermission": { + "methods": [ + "create_permission" + ] + }, + "DeletePermission": { + "methods": [ + 
"delete_permission" + ] + }, + "GetPermission": { + "methods": [ + "get_permission" + ] + }, + "ListPermissions": { + "methods": [ + "list_permissions" + ] + }, + "TransferOwnership": { + "methods": [ + "transfer_ownership" + ] + }, + "UpdatePermission": { + "methods": [ + "update_permission" + ] + } + } + }, + "rest": { + "libraryClient": "PermissionServiceClient", + "rpcs": { + "CreatePermission": { + "methods": [ + "create_permission" + ] + }, + "DeletePermission": { + "methods": [ + "delete_permission" + ] + }, + "GetPermission": { + "methods": [ + "get_permission" + ] + }, + "ListPermissions": { + "methods": [ + "list_permissions" + ] + }, + "TransferOwnership": { + "methods": [ + "transfer_ownership" + ] + }, + "UpdatePermission": { + "methods": [ + "update_permission" + ] + } + } + } + } + }, + "RetrieverService": { + "clients": { + "grpc": { + "libraryClient": "RetrieverServiceClient", + "rpcs": { + "BatchCreateChunks": { + "methods": [ + "batch_create_chunks" + ] + }, + "BatchDeleteChunks": { + "methods": [ + "batch_delete_chunks" + ] + }, + "BatchUpdateChunks": { + "methods": [ + "batch_update_chunks" + ] + }, + "CreateChunk": { + "methods": [ + "create_chunk" + ] + }, + "CreateCorpus": { + "methods": [ + "create_corpus" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteChunk": { + "methods": [ + "delete_chunk" + ] + }, + "DeleteCorpus": { + "methods": [ + "delete_corpus" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetChunk": { + "methods": [ + "get_chunk" + ] + }, + "GetCorpus": { + "methods": [ + "get_corpus" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListChunks": { + "methods": [ + "list_chunks" + ] + }, + "ListCorpora": { + "methods": [ + "list_corpora" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "QueryCorpus": { + "methods": [ + "query_corpus" + ] + }, + "QueryDocument": { + "methods": [ + "query_document" + ] + }, + 
"UpdateChunk": { + "methods": [ + "update_chunk" + ] + }, + "UpdateCorpus": { + "methods": [ + "update_corpus" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RetrieverServiceAsyncClient", + "rpcs": { + "BatchCreateChunks": { + "methods": [ + "batch_create_chunks" + ] + }, + "BatchDeleteChunks": { + "methods": [ + "batch_delete_chunks" + ] + }, + "BatchUpdateChunks": { + "methods": [ + "batch_update_chunks" + ] + }, + "CreateChunk": { + "methods": [ + "create_chunk" + ] + }, + "CreateCorpus": { + "methods": [ + "create_corpus" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteChunk": { + "methods": [ + "delete_chunk" + ] + }, + "DeleteCorpus": { + "methods": [ + "delete_corpus" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetChunk": { + "methods": [ + "get_chunk" + ] + }, + "GetCorpus": { + "methods": [ + "get_corpus" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListChunks": { + "methods": [ + "list_chunks" + ] + }, + "ListCorpora": { + "methods": [ + "list_corpora" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "QueryCorpus": { + "methods": [ + "query_corpus" + ] + }, + "QueryDocument": { + "methods": [ + "query_document" + ] + }, + "UpdateChunk": { + "methods": [ + "update_chunk" + ] + }, + "UpdateCorpus": { + "methods": [ + "update_corpus" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "rest": { + "libraryClient": "RetrieverServiceClient", + "rpcs": { + "BatchCreateChunks": { + "methods": [ + "batch_create_chunks" + ] + }, + "BatchDeleteChunks": { + "methods": [ + "batch_delete_chunks" + ] + }, + "BatchUpdateChunks": { + "methods": [ + "batch_update_chunks" + ] + }, + "CreateChunk": { + "methods": [ + "create_chunk" + ] + }, + "CreateCorpus": { + "methods": [ + "create_corpus" + ] + }, + "CreateDocument": { + "methods": [ + 
"create_document" + ] + }, + "DeleteChunk": { + "methods": [ + "delete_chunk" + ] + }, + "DeleteCorpus": { + "methods": [ + "delete_corpus" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetChunk": { + "methods": [ + "get_chunk" + ] + }, + "GetCorpus": { + "methods": [ + "get_corpus" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListChunks": { + "methods": [ + "list_chunks" + ] + }, + "ListCorpora": { + "methods": [ + "list_corpora" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "QueryCorpus": { + "methods": [ + "query_corpus" + ] + }, + "QueryDocument": { + "methods": [ + "query_document" + ] + }, + "UpdateChunk": { + "methods": [ + "update_chunk" + ] + }, + "UpdateCorpus": { + "methods": [ + "update_corpus" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + } + } + }, + "TextService": { + "clients": { + "grpc": { + "libraryClient": "TextServiceClient", + "rpcs": { + "BatchEmbedText": { + "methods": [ + "batch_embed_text" + ] + }, + "CountTextTokens": { + "methods": [ + "count_text_tokens" + ] + }, + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextServiceAsyncClient", + "rpcs": { + "BatchEmbedText": { + "methods": [ + "batch_embed_text" + ] + }, + "CountTextTokens": { + "methods": [ + "count_text_tokens" + ] + }, + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + }, + "rest": { + "libraryClient": "TextServiceClient", + "rpcs": { + "BatchEmbedText": { + "methods": [ + "batch_embed_text" + ] + }, + "CountTextTokens": { + "methods": [ + "count_text_tokens" + ] + }, + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + } + } + } + } +} diff --git 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py new file mode 100644 index 000000000000..3754937a30b0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/py.typed b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/py.typed new file mode 100644 index 000000000000..38773eee6363 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ai-generativelanguage package uses inline types. 
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/__init__.py new file mode 100644 index 000000000000..2247026798d5 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DiscussServiceAsyncClient +from .client import DiscussServiceClient + +__all__ = ( + "DiscussServiceClient", + "DiscussServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py new file mode 100644 index 000000000000..29b97ea0607f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/async_client.py @@ -0,0 +1,564 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import discuss_service, safety + +from .client import DiscussServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .transports.grpc_asyncio import DiscussServiceGrpcAsyncIOTransport + + +class DiscussServiceAsyncClient: + """An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. 
+ """ + + _client: DiscussServiceClient + + DEFAULT_ENDPOINT = DiscussServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(DiscussServiceClient.model_path) + parse_model_path = staticmethod(DiscussServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + DiscussServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DiscussServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DiscussServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DiscussServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DiscussServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DiscussServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DiscussServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DiscussServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DiscussServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DiscussServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceAsyncClient: The constructed client. + """ + return DiscussServiceClient.from_service_account_info.__func__(DiscussServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceAsyncClient: The constructed client. + """ + return DiscussServiceClient.from_service_account_file.__func__(DiscussServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return DiscussServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DiscussServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DiscussServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DiscussServiceClient).get_transport_class, type(DiscussServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DiscussServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the discuss service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DiscussServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DiscussServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_message( + self, + request: Optional[Union[discuss_service.GenerateMessageRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Generates a response from the model given an input + ``MessagePrompt``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateMessageRequest, dict]]): + The request object. Request to generate a message + response from the model. + model (:class:`str`): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta.types.MessagePrompt`): + Required. The structured textual + input given to the model as a prompt. + Given a + prompt, the model will return what it + predicts is the next message in the + discussion. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (:class:`float`): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ candidate_count (:class:`int`): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If + unset, this will default to ``1``. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (:class:`float`): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens + whose probability sum is at least ``top_p``. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (:class:`int`): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateMessageResponse: + The response from the model. + + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [model, prompt, temperature, candidate_count, top_p, top_k] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = discuss_service.GenerateMessageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_message, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def count_message_tokens( + self, + request: Optional[ + Union[discuss_service.CountMessageTokensRequest, dict] + ] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Runs a model's tokenizer on a string and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_message_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CountMessageTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. 
+ + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta.types.MessagePrompt`): + Required. The prompt, whose token + count is to be returned. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CountMessageTokensResponse: + A response from CountMessageTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = discuss_service.CountMessageTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.count_message_tokens, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DiscussServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DiscussServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py new file mode 100644 index 000000000000..e99768784230 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py @@ -0,0 +1,775 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import discuss_service, safety + +from .transports.base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .transports.grpc import DiscussServiceGrpcTransport +from .transports.grpc_asyncio import DiscussServiceGrpcAsyncIOTransport +from .transports.rest import DiscussServiceRestTransport + + +class DiscussServiceClientMeta(type): + """Metaclass for the DiscussService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DiscussServiceTransport]] + _transport_registry["grpc"] = DiscussServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DiscussServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DiscussServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DiscussServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DiscussServiceClient(metaclass=DiscussServiceClientMeta): + """An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DiscussServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DiscussServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DiscussServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the discuss service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DiscussServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DiscussServiceTransport): + # transport is a DiscussServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def generate_message( + self, + request: Optional[Union[discuss_service.GenerateMessageRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Generates a response from the model given an input + ``MessagePrompt``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GenerateMessageRequest, dict]): + The request object. Request to generate a message + response from the model. + model (str): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1beta.types.MessagePrompt): + Required. The structured textual + input given to the model as a prompt. + Given a + prompt, the model will return what it + predicts is the next message in the + discussion. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (int): + Optional. 
The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If + unset, this will default to ``1``. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (float): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens + whose probability sum is at least ``top_p``. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateMessageResponse: + The response from the model. + + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [model, prompt, temperature, candidate_count, top_p, top_k] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a discuss_service.GenerateMessageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, discuss_service.GenerateMessageRequest): + request = discuss_service.GenerateMessageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_message] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def count_message_tokens( + self, + request: Optional[ + Union[discuss_service.CountMessageTokensRequest, dict] + ] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Runs a model's tokenizer on a string and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_message_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CountMessageTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1beta.types.MessagePrompt): + Required. The prompt, whose token + count is to be returned. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.CountMessageTokensResponse: + A response from CountMessageTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a discuss_service.CountMessageTokensRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, discuss_service.CountMessageTokensRequest): + request = discuss_service.CountMessageTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_message_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DiscussServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DiscussServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/__init__.py new file mode 100644 index 000000000000..209ce4db6d6e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DiscussServiceTransport +from .grpc import DiscussServiceGrpcTransport +from .grpc_asyncio import DiscussServiceGrpcAsyncIOTransport +from .rest import DiscussServiceRestInterceptor, DiscussServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DiscussServiceTransport]] +_transport_registry["grpc"] = DiscussServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DiscussServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DiscussServiceRestTransport + +__all__ = ( + "DiscussServiceTransport", + "DiscussServiceGrpcTransport", + "DiscussServiceGrpcAsyncIOTransport", + "DiscussServiceRestTransport", + "DiscussServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/base.py new file mode 100644 index 000000000000..49d99b8d9187 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/base.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import discuss_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DiscussServiceTransport(abc.ABC): + """Abstract transport class for DiscussService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.generate_message: gapic_v1.method.wrap_method( + self.generate_message, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.count_message_tokens: gapic_v1.method.wrap_method( + self.count_message_tokens, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + Union[ + discuss_service.GenerateMessageResponse, + Awaitable[discuss_service.GenerateMessageResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + Union[ + discuss_service.CountMessageTokensResponse, + Awaitable[discuss_service.CountMessageTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DiscussServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/grpc.py new file mode 100644 index 000000000000..7495a7f14ba0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/grpc.py 
@@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport + + +class DiscussServiceGrpcTransport(DiscussServiceTransport): + """gRPC backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + discuss_service.GenerateMessageResponse, + ]: + r"""Return a callable for the generate message method over gRPC. + + Generates a response from the model given an input + ``MessagePrompt``. + + Returns: + Callable[[~.GenerateMessageRequest], + ~.GenerateMessageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_message" not in self._stubs: + self._stubs["generate_message"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.DiscussService/GenerateMessage", + request_serializer=discuss_service.GenerateMessageRequest.serialize, + response_deserializer=discuss_service.GenerateMessageResponse.deserialize, + ) + return self._stubs["generate_message"] + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + discuss_service.CountMessageTokensResponse, + ]: + r"""Return a callable for the count message tokens method over gRPC. + + Runs a model's tokenizer on a string and returns the + token count. + + Returns: + Callable[[~.CountMessageTokensRequest], + ~.CountMessageTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_message_tokens" not in self._stubs: + self._stubs["count_message_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.DiscussService/CountMessageTokens", + request_serializer=discuss_service.CountMessageTokensRequest.serialize, + response_deserializer=discuss_service.CountMessageTokensResponse.deserialize, + ) + return self._stubs["count_message_tokens"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DiscussServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..26a81cb81dc4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/grpc_asyncio.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .grpc import DiscussServiceGrpcTransport + + +class DiscussServiceGrpcAsyncIOTransport(DiscussServiceTransport): + """gRPC AsyncIO backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + Awaitable[discuss_service.GenerateMessageResponse], + ]: + r"""Return a callable for the generate message method over gRPC. 
+ + Generates a response from the model given an input + ``MessagePrompt``. + + Returns: + Callable[[~.GenerateMessageRequest], + Awaitable[~.GenerateMessageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_message" not in self._stubs: + self._stubs["generate_message"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.DiscussService/GenerateMessage", + request_serializer=discuss_service.GenerateMessageRequest.serialize, + response_deserializer=discuss_service.GenerateMessageResponse.deserialize, + ) + return self._stubs["generate_message"] + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + Awaitable[discuss_service.CountMessageTokensResponse], + ]: + r"""Return a callable for the count message tokens method over gRPC. + + Runs a model's tokenizer on a string and returns the + token count. + + Returns: + Callable[[~.CountMessageTokensRequest], + Awaitable[~.CountMessageTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_message_tokens" not in self._stubs: + self._stubs["count_message_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.DiscussService/CountMessageTokens", + request_serializer=discuss_service.CountMessageTokensRequest.serialize, + response_deserializer=discuss_service.CountMessageTokensResponse.deserialize, + ) + return self._stubs["count_message_tokens"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("DiscussServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py new file mode 100644 index 000000000000..2dd9f09f2a16 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py @@ -0,0 +1,474 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DiscussServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DiscussServiceRestInterceptor: + """Interceptor for DiscussService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DiscussServiceRestTransport. + + .. 
code-block:: python + class MyCustomDiscussServiceInterceptor(DiscussServiceRestInterceptor): + def pre_count_message_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_message_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_message(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_message(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DiscussServiceRestTransport(interceptor=MyCustomDiscussServiceInterceptor()) + client = DiscussServiceClient(transport=transport) + + + """ + + def pre_count_message_tokens( + self, + request: discuss_service.CountMessageTokensRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[discuss_service.CountMessageTokensRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for count_message_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_count_message_tokens( + self, response: discuss_service.CountMessageTokensResponse + ) -> discuss_service.CountMessageTokensResponse: + """Post-rpc interceptor for count_message_tokens + + Override in a subclass to manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. + """ + return response + + def pre_generate_message( + self, + request: discuss_service.GenerateMessageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[discuss_service.GenerateMessageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_message + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. 
+ """ + return request, metadata + + def post_generate_message( + self, response: discuss_service.GenerateMessageResponse + ) -> discuss_service.GenerateMessageResponse: + """Post-rpc interceptor for generate_message + + Override in a subclass to manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DiscussServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DiscussServiceRestInterceptor + + +class DiscussServiceRestTransport(DiscussServiceTransport): + """REST backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DiscussServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DiscussServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CountMessageTokens(DiscussServiceRestStub): + def __hash__(self): + return hash("CountMessageTokens") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: discuss_service.CountMessageTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Call the count message tokens method over HTTP. + + Args: + request (~.discuss_service.CountMessageTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.discuss_service.CountMessageTokensResponse: + A response from ``CountMessageTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:countMessageTokens", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_count_message_tokens( + request, metadata + ) + pb_request = discuss_service.CountMessageTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discuss_service.CountMessageTokensResponse() + pb_resp = discuss_service.CountMessageTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_count_message_tokens(resp) + return resp + + class _GenerateMessage(DiscussServiceRestStub): + def __hash__(self): + return hash("GenerateMessage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: discuss_service.GenerateMessageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Call the generate message method over HTTP. + + Args: + request (~.discuss_service.GenerateMessageRequest): + The request object. Request to generate a message + response from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.discuss_service.GenerateMessageResponse: + The response from the model. + + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:generateMessage", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_message( + request, metadata + ) + pb_request = discuss_service.GenerateMessageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discuss_service.GenerateMessageResponse() + pb_resp = discuss_service.GenerateMessageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_message(resp) + return resp + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + discuss_service.CountMessageTokensResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountMessageTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + discuss_service.GenerateMessageResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateMessage(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DiscussServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/__init__.py new file mode 100644 index 000000000000..1e92ad575a7b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import GenerativeServiceAsyncClient +from .client import GenerativeServiceClient + +__all__ = ( + "GenerativeServiceClient", + "GenerativeServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py new file mode 100644 index 000000000000..7acbe07d0e8d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py @@ -0,0 +1,1073 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + AsyncIterable, + Awaitable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import generative_service, safety +from google.ai.generativelanguage_v1beta.types import content +from google.ai.generativelanguage_v1beta.types import content as gag_content + +from .client import GenerativeServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport + + +class GenerativeServiceAsyncClient: + """API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. 
+ """ + + _client: GenerativeServiceClient + + DEFAULT_ENDPOINT = GenerativeServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(GenerativeServiceClient.model_path) + parse_model_path = staticmethod(GenerativeServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + GenerativeServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GenerativeServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GenerativeServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + GenerativeServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GenerativeServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GenerativeServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(GenerativeServiceClient.common_project_path) + parse_common_project_path = staticmethod( + GenerativeServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(GenerativeServiceClient.common_location_path) + parse_common_location_path = staticmethod( + GenerativeServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceAsyncClient: The constructed client. 
+ """ + return GenerativeServiceClient.from_service_account_info.__func__(GenerativeServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceAsyncClient: The constructed client. + """ + return GenerativeServiceClient.from_service_account_file.__func__(GenerativeServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GenerativeServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GenerativeServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GenerativeServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(GenerativeServiceClient).get_transport_class, type(GenerativeServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, GenerativeServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the generative service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.GenerativeServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GenerativeServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Generates a response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = await client.generate_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateContentRequest, dict]]): + The request object. Request to generate a completion from + the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. 
They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.GenerateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def generate_answer( + self, + request: Optional[Union[generative_service.GenerateAnswerRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + safety_settings: Optional[MutableSequence[safety.SafetySetting]] = None, + answer_style: Optional[ + generative_service.GenerateAnswerRequest.AnswerStyle + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateAnswerResponse: + r"""Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_generate_answer(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = await client.generate_answer(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]]): + The request object. Request to generate a grounded answer + from the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the grounded response. + + Format: ``model=models/{model}``. 
+ + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): + Required. The content of the current conversation with + the model. For single-turn queries, this is a single + question to answer. For multi-turn queries, this is a + repeated field that contains conversation history and + the last ``Content`` in the list containing the + question. + + Note: GenerateAnswer currently only supports queries in + English. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + safety_settings (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]`): + Optional. A list of unique ``SafetySetting`` instances + for blocking unsafe content. + + This will be enforced on the + ``GenerateAnswerRequest.contents`` and + ``GenerateAnswerResponse.candidate``. There should not + be more than one setting for each ``SafetyCategory`` + type. The API will block any contents and responses that + fail to meet the thresholds set by these settings. This + list overrides the default settings for each + ``SafetyCategory`` specified in the safety_settings. If + there is no ``SafetySetting`` for a given + ``SafetyCategory`` provided in the list, the API will + use the default safety setting for that category. + + This corresponds to the ``safety_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + answer_style (:class:`google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle`): + Required. Style in which answers + should be returned. + + This corresponds to the ``answer_style`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse: + Response from the model for a + grounded answer. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents, safety_settings, answer_style]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.GenerateAnswerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if answer_style is not None: + request.answer_style = answer_style + if contents: + request.contents.extend(contents) + if safety_settings: + request.safety_settings.extend(safety_settings) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_answer, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stream_generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: + r"""Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = await client.stream_generate_content(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateContentRequest, dict]]): + The request object. Request to generate a completion from + the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. 
+ + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = generative_service.GenerateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stream_generate_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def embed_content( + self, + request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None, + *, + model: Optional[str] = None, + content: Optional[gag_content.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Generates an embedding from the model given an input + ``Content``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_embed_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.EmbedContentRequest, dict]]): + The request object. Request containing the ``Content`` for the model to + embed. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`google.ai.generativelanguage_v1beta.types.Content`): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.EmbedContentResponse: + The response to an EmbedContentRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.EmbedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.embed_content, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_embed_contents( + self, + request: Optional[ + Union[generative_service.BatchEmbedContentsRequest, dict] + ] = None, + *, + model: Optional[str] = None, + requests: Optional[ + MutableSequence[generative_service.EmbedContentRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1beta.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = await client.batch_embed_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.BatchEmbedContentsRequest, dict]]): + The request object. Batch request to get embeddings from + the model for a list of prompts. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedContentRequest]`): + Required. Embed requests for the batch. The model in + each of these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchEmbedContentsResponse: + The response to a BatchEmbedContentsRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.BatchEmbedContentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_embed_contents, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def count_tokens( + self, + request: Optional[Union[generative_service.CountTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.CountTokensResponse: + r"""Runs a model's tokenizer on input content and returns + the token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_count_tokens(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = await client.count_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CountTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ contents (:class:`MutableSequence[google.ai.generativelanguage_v1beta.types.Content]`): + Required. The input given to the + model as a prompt. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CountTokensResponse: + A response from CountTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = generative_service.CountTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.count_tokens, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "GenerativeServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GenerativeServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py new file mode 100644 index 000000000000..26736e75ad23 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py @@ -0,0 +1,1247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Iterable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import generative_service, safety +from google.ai.generativelanguage_v1beta.types import content +from google.ai.generativelanguage_v1beta.types import content as gag_content + +from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .transports.grpc import GenerativeServiceGrpcTransport +from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport +from .transports.rest import GenerativeServiceRestTransport + + +class GenerativeServiceClientMeta(type): + """Metaclass for the GenerativeService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GenerativeServiceTransport]] + _transport_registry["grpc"] = GenerativeServiceGrpcTransport + _transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport + _transport_registry["rest"] = GenerativeServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GenerativeServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GenerativeServiceClient(metaclass=GenerativeServiceClientMeta): + """API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GenerativeServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GenerativeServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GenerativeServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the generative service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GenerativeServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GenerativeServiceTransport): + # transport is a GenerativeServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Generates a response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = client.generate_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GenerateContentRequest, dict]): + The request object. Request to generate a completion from + the model. + model (str): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. 
They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.GenerateContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def generate_answer( + self, + request: Optional[Union[generative_service.GenerateAnswerRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + safety_settings: Optional[MutableSequence[safety.SafetySetting]] = None, + answer_style: Optional[ + generative_service.GenerateAnswerRequest.AnswerStyle + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateAnswerResponse: + r"""Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_generate_answer(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = client.generate_answer(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]): + The request object. Request to generate a grounded answer + from the model. + model (str): + Required. The name of the ``Model`` to use for + generating the grounded response. + + Format: ``model=models/{model}``. 
+ + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. The content of the current conversation with + the model. For single-turn queries, this is a single + question to answer. For multi-turn queries, this is a + repeated field that contains conversation history and + the last ``Content`` in the list containing the + question. + + Note: GenerateAnswer currently only supports queries in + English. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances + for blocking unsafe content. + + This will be enforced on the + ``GenerateAnswerRequest.contents`` and + ``GenerateAnswerResponse.candidate``. There should not + be more than one setting for each ``SafetyCategory`` + type. The API will block any contents and responses that + fail to meet the thresholds set by these settings. This + list overrides the default settings for each + ``SafetyCategory`` specified in the safety_settings. If + there is no ``SafetySetting`` for a given + ``SafetyCategory`` provided in the list, the API will + use the default safety setting for that category. + + This corresponds to the ``safety_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + answer_style (google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle): + Required. Style in which answers + should be returned. + + This corresponds to the ``answer_style`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse: + Response from the model for a + grounded answer. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents, safety_settings, answer_style]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.GenerateAnswerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.GenerateAnswerRequest): + request = generative_service.GenerateAnswerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + if safety_settings is not None: + request.safety_settings = safety_settings + if answer_style is not None: + request.answer_style = answer_style + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_answer] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def stream_generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[generative_service.GenerateContentResponse]: + r"""Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = client.stream_generate_content(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GenerateContentRequest, dict]): + The request object. Request to generate a completion from + the model. + model (str): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. 
The content of the current + conversation with the model. + For single-turn queries, this is a + single instance. For multi-turn queries, + this is a repeated field that contains + conversation history + latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They + are reported for both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API contract is that: - either all requested + candidates are returned or no candidates at all - no + candidates are returned only if there was something + wrong with the prompt (see prompt_feedback) - + feedback on each candidate is reported on + finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.GenerateContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stream_generate_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def embed_content( + self, + request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None, + *, + model: Optional[str] = None, + content: Optional[gag_content.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Generates an embedding from the model given an input + ``Content``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_embed_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = client.embed_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.EmbedContentRequest, dict]): + The request object. Request containing the ``Content`` for the model to + embed. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (google.ai.generativelanguage_v1beta.types.Content): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.EmbedContentResponse: + The response to an EmbedContentRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.EmbedContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.EmbedContentRequest): + request = generative_service.EmbedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.embed_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_embed_contents( + self, + request: Optional[ + Union[generative_service.BatchEmbedContentsRequest, dict] + ] = None, + *, + model: Optional[str] = None, + requests: Optional[ + MutableSequence[generative_service.EmbedContentRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1beta.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = client.batch_embed_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.BatchEmbedContentsRequest, dict]): + The request object. Batch request to get embeddings from + the model for a list of prompts. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedContentRequest]): + Required. Embed requests for the batch. The model in + each of these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchEmbedContentsResponse: + The response to a BatchEmbedContentsRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.BatchEmbedContentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.BatchEmbedContentsRequest): + request = generative_service.BatchEmbedContentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_embed_contents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def count_tokens( + self, + request: Optional[Union[generative_service.CountTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.CountTokensResponse: + r"""Runs a model's tokenizer on input content and returns + the token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_count_tokens(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = client.count_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CountTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. The input given to the + model as a prompt. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.CountTokensResponse: + A response from CountTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a generative_service.CountTokensRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, generative_service.CountTokensRequest): + request = generative_service.CountTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GenerativeServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GenerativeServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/__init__.py new file mode 100644 index 000000000000..1d35da543a1e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GenerativeServiceTransport +from .grpc import GenerativeServiceGrpcTransport +from .grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport +from .rest import GenerativeServiceRestInterceptor, GenerativeServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GenerativeServiceTransport]] +_transport_registry["grpc"] = GenerativeServiceGrpcTransport +_transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport +_transport_registry["rest"] = GenerativeServiceRestTransport + +__all__ = ( + "GenerativeServiceTransport", + "GenerativeServiceGrpcTransport", + "GenerativeServiceGrpcAsyncIOTransport", + "GenerativeServiceRestTransport", + "GenerativeServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py new file mode 100644 index 000000000000..7fffa3e4d2d4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/base.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import generative_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GenerativeServiceTransport(abc.ABC): + """Abstract transport class for GenerativeService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.generate_content: gapic_v1.method.wrap_method( + self.generate_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.generate_answer: gapic_v1.method.wrap_method( + self.generate_answer, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.stream_generate_content: gapic_v1.method.wrap_method( + self.stream_generate_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.embed_content: gapic_v1.method.wrap_method( + self.embed_content, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_embed_contents: gapic_v1.method.wrap_method( + self.batch_embed_contents, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.count_tokens: gapic_v1.method.wrap_method( + self.count_tokens, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources 
associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Union[ + generative_service.GenerateContentResponse, + Awaitable[generative_service.GenerateContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + Union[ + generative_service.GenerateAnswerResponse, + Awaitable[generative_service.GenerateAnswerResponse], + ], + ]: + raise NotImplementedError() + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Union[ + generative_service.GenerateContentResponse, + Awaitable[generative_service.GenerateContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + Union[ + generative_service.EmbedContentResponse, + Awaitable[generative_service.EmbedContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + Union[ + generative_service.BatchEmbedContentsResponse, + Awaitable[generative_service.BatchEmbedContentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], + Union[ + generative_service.CountTokensResponse, + Awaitable[generative_service.CountTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GenerativeServiceTransport",) diff --git 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py new file mode 100644 index 000000000000..b1d1aaa0fb7e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc.py @@ -0,0 +1,420 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport + + +class GenerativeServiceGrpcTransport(GenerativeServiceTransport): + """gRPC backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + r"""Return a callable for the generate content method over gRPC. + + Generates a response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + ~.GenerateContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_content" not in self._stubs: + self._stubs["generate_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/GenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["generate_content"] + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + generative_service.GenerateAnswerResponse, + ]: + r"""Return a callable for the generate answer method over gRPC. + + Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + Returns: + Callable[[~.GenerateAnswerRequest], + ~.GenerateAnswerResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_answer" not in self._stubs: + self._stubs["generate_answer"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/GenerateAnswer", + request_serializer=generative_service.GenerateAnswerRequest.serialize, + response_deserializer=generative_service.GenerateAnswerResponse.deserialize, + ) + return self._stubs["generate_answer"] + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + r"""Return a callable for the stream generate content method over gRPC. + + Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + ~.GenerateContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_generate_content" not in self._stubs: + self._stubs["stream_generate_content"] = self.grpc_channel.unary_stream( + "/google.ai.generativelanguage.v1beta.GenerativeService/StreamGenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["stream_generate_content"] + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + generative_service.EmbedContentResponse, + ]: + r"""Return a callable for the embed content method over gRPC. + + Generates an embedding from the model given an input + ``Content``. 
+ + Returns: + Callable[[~.EmbedContentRequest], + ~.EmbedContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_content" not in self._stubs: + self._stubs["embed_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/EmbedContent", + request_serializer=generative_service.EmbedContentRequest.serialize, + response_deserializer=generative_service.EmbedContentResponse.deserialize, + ) + return self._stubs["embed_content"] + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + generative_service.BatchEmbedContentsResponse, + ]: + r"""Return a callable for the batch embed contents method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. + + Returns: + Callable[[~.BatchEmbedContentsRequest], + ~.BatchEmbedContentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_embed_contents" not in self._stubs: + self._stubs["batch_embed_contents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/BatchEmbedContents", + request_serializer=generative_service.BatchEmbedContentsRequest.serialize, + response_deserializer=generative_service.BatchEmbedContentsResponse.deserialize, + ) + return self._stubs["batch_embed_contents"] + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], generative_service.CountTokensResponse + ]: + r"""Return a callable for the count tokens method over gRPC. + + Runs a model's tokenizer on input content and returns + the token count. + + Returns: + Callable[[~.CountTokensRequest], + ~.CountTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_tokens" not in self._stubs: + self._stubs["count_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/CountTokens", + request_serializer=generative_service.CountTokensRequest.serialize, + response_deserializer=generative_service.CountTokensResponse.deserialize, + ) + return self._stubs["count_tokens"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GenerativeServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..62d461f879db --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/grpc_asyncio.py @@ -0,0 +1,420 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .grpc import GenerativeServiceGrpcTransport + + +class GenerativeServiceGrpcAsyncIOTransport(GenerativeServiceTransport): + """gRPC AsyncIO backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Awaitable[generative_service.GenerateContentResponse], + ]: + r"""Return a callable for the generate content method over gRPC. 
+ + Generates a response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + Awaitable[~.GenerateContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_content" not in self._stubs: + self._stubs["generate_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/GenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["generate_content"] + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + Awaitable[generative_service.GenerateAnswerResponse], + ]: + r"""Return a callable for the generate answer method over gRPC. + + Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + Returns: + Callable[[~.GenerateAnswerRequest], + Awaitable[~.GenerateAnswerResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_answer" not in self._stubs: + self._stubs["generate_answer"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/GenerateAnswer", + request_serializer=generative_service.GenerateAnswerRequest.serialize, + response_deserializer=generative_service.GenerateAnswerResponse.deserialize, + ) + return self._stubs["generate_answer"] + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Awaitable[generative_service.GenerateContentResponse], + ]: + r"""Return a callable for the stream generate content method over gRPC. + + Generates a streamed response from the model given an input + ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + Awaitable[~.GenerateContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_generate_content" not in self._stubs: + self._stubs["stream_generate_content"] = self.grpc_channel.unary_stream( + "/google.ai.generativelanguage.v1beta.GenerativeService/StreamGenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["stream_generate_content"] + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + Awaitable[generative_service.EmbedContentResponse], + ]: + r"""Return a callable for the embed content method over gRPC. + + Generates an embedding from the model given an input + ``Content``. + + Returns: + Callable[[~.EmbedContentRequest], + Awaitable[~.EmbedContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_content" not in self._stubs: + self._stubs["embed_content"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/EmbedContent", + request_serializer=generative_service.EmbedContentRequest.serialize, + response_deserializer=generative_service.EmbedContentResponse.deserialize, + ) + return self._stubs["embed_content"] + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + Awaitable[generative_service.BatchEmbedContentsResponse], + ]: + r"""Return a callable for the batch embed contents method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. + + Returns: + Callable[[~.BatchEmbedContentsRequest], + Awaitable[~.BatchEmbedContentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_contents" not in self._stubs: + self._stubs["batch_embed_contents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/BatchEmbedContents", + request_serializer=generative_service.BatchEmbedContentsRequest.serialize, + response_deserializer=generative_service.BatchEmbedContentsResponse.deserialize, + ) + return self._stubs["batch_embed_contents"] + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], + Awaitable[generative_service.CountTokensResponse], + ]: + r"""Return a callable for the count tokens method over gRPC. + + Runs a model's tokenizer on input content and returns + the token count. 
+ + Returns: + Callable[[~.CountTokensRequest], + Awaitable[~.CountTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_tokens" not in self._stubs: + self._stubs["count_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.GenerativeService/CountTokens", + request_serializer=generative_service.CountTokensRequest.serialize, + response_deserializer=generative_service.CountTokensResponse.deserialize, + ) + return self._stubs["count_tokens"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("GenerativeServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py new file mode 100644 index 000000000000..5e91c374f274 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py @@ -0,0 +1,1051 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import generative_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import GenerativeServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GenerativeServiceRestInterceptor: + """Interceptor for GenerativeService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GenerativeServiceRestTransport. + + .. 
code-block:: python + class MyCustomGenerativeServiceInterceptor(GenerativeServiceRestInterceptor): + def pre_batch_embed_contents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_embed_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_count_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_embed_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_embed_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_answer(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_answer(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stream_generate_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stream_generate_content(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GenerativeServiceRestTransport(interceptor=MyCustomGenerativeServiceInterceptor()) + client = GenerativeServiceClient(transport=transport) + + + """ + + def pre_batch_embed_contents( + self, + request: generative_service.BatchEmbedContentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.BatchEmbedContentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_embed_contents + + 
Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_batch_embed_contents( + self, response: generative_service.BatchEmbedContentsResponse + ) -> generative_service.BatchEmbedContentsResponse: + """Post-rpc interceptor for batch_embed_contents + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_count_tokens( + self, + request: generative_service.CountTokensRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.CountTokensRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for count_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_count_tokens( + self, response: generative_service.CountTokensResponse + ) -> generative_service.CountTokensResponse: + """Post-rpc interceptor for count_tokens + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_embed_content( + self, + request: generative_service.EmbedContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.EmbedContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for embed_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. 
+ """ + return request, metadata + + def post_embed_content( + self, response: generative_service.EmbedContentResponse + ) -> generative_service.EmbedContentResponse: + """Post-rpc interceptor for embed_content + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_generate_answer( + self, + request: generative_service.GenerateAnswerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.GenerateAnswerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_answer + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_generate_answer( + self, response: generative_service.GenerateAnswerResponse + ) -> generative_service.GenerateAnswerResponse: + """Post-rpc interceptor for generate_answer + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_generate_content( + self, + request: generative_service.GenerateContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.GenerateContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_generate_content( + self, response: generative_service.GenerateContentResponse + ) -> generative_service.GenerateContentResponse: + """Post-rpc interceptor for generate_content + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. 
+ """ + return response + + def pre_stream_generate_content( + self, + request: generative_service.GenerateContentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[generative_service.GenerateContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stream_generate_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_stream_generate_content( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for stream_generate_content + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GenerativeServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GenerativeServiceRestInterceptor + + +class GenerativeServiceRestTransport(GenerativeServiceTransport): + """REST backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GenerativeServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or GenerativeServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _BatchEmbedContents(GenerativeServiceRestStub):
+        def __hash__(self):
+            return hash("BatchEmbedContents")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: generative_service.BatchEmbedContentsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> generative_service.BatchEmbedContentsResponse:
+            r"""Call the batch embed contents method over HTTP.
+ + Args: + request (~.generative_service.BatchEmbedContentsRequest): + The request object. Batch request to get embeddings from + the model for a list of prompts. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.BatchEmbedContentsResponse: + The response to a ``BatchEmbedContentsRequest``. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:batchEmbedContents", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_embed_contents( + request, metadata + ) + pb_request = generative_service.BatchEmbedContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.BatchEmbedContentsResponse() + pb_resp = generative_service.BatchEmbedContentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_embed_contents(resp) + return resp + + class _CountTokens(GenerativeServiceRestStub): + def __hash__(self): + return hash("CountTokens") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.CountTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.CountTokensResponse: + r"""Call the count tokens method over HTTP. + + Args: + request (~.generative_service.CountTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.CountTokensResponse: + A response from ``CountTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:countTokens", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_count_tokens(request, metadata) + pb_request = generative_service.CountTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.CountTokensResponse() + pb_resp = generative_service.CountTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_count_tokens(resp) + return resp + + class _EmbedContent(GenerativeServiceRestStub): + def __hash__(self): + return hash("EmbedContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.EmbedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Call the embed content method over HTTP. + + Args: + request (~.generative_service.EmbedContentRequest): + The request object. Request containing the ``Content`` for the model to + embed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.EmbedContentResponse: + The response to an ``EmbedContentRequest``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:embedContent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_embed_content(request, metadata) + pb_request = generative_service.EmbedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.EmbedContentResponse() + pb_resp = generative_service.EmbedContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_embed_content(resp) + return resp + + class _GenerateAnswer(GenerativeServiceRestStub): + def __hash__(self): + return hash("GenerateAnswer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.GenerateAnswerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateAnswerResponse: + r"""Call the generate answer method over HTTP. + + Args: + request (~.generative_service.GenerateAnswerRequest): + The request object. Request to generate a grounded answer + from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.GenerateAnswerResponse: + Response from the model for a + grounded answer. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:generateAnswer", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_answer(request, metadata) + pb_request = generative_service.GenerateAnswerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.GenerateAnswerResponse() + pb_resp = generative_service.GenerateAnswerResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_answer(resp) + return resp + + class _GenerateContent(GenerativeServiceRestStub): + def __hash__(self): + return hash("GenerateContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.GenerateContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Call the generate content method over HTTP. + + Args: + request (~.generative_service.GenerateContentRequest): + The request object. Request to generate a completion from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They are + reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each + candidate in ``finish_reason`` and in + ``safety_ratings``. 
The API contract is that: + + - either all requested candidates are returned or no + candidates at all + - no candidates are returned only if there was + something wrong with the prompt (see + ``prompt_feedback``) + - feedback on each candidate is reported on + ``finish_reason`` and ``safety_ratings``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:generateContent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_content( + request, metadata + ) + pb_request = generative_service.GenerateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.GenerateContentResponse() + pb_resp = generative_service.GenerateContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_content(resp) + return resp + + class _StreamGenerateContent(GenerativeServiceRestStub): + def __hash__(self): + return hash("StreamGenerateContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: generative_service.GenerateContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the stream generate content method over HTTP. + + Args: + request (~.generative_service.GenerateContentRequest): + The request object. Request to generate a completion from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_service.GenerateContentResponse: + Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They are + reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each + candidate in ``finish_reason`` and in + ``safety_ratings``. 
The API contract is that: + + - either all requested candidates are returned or no + candidates at all + - no candidates are returned only if there was + something wrong with the prompt (see + ``prompt_feedback``) + - feedback on each candidate is reported on + ``finish_reason`` and ``safety_ratings``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:streamGenerateContent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stream_generate_content( + request, metadata + ) + pb_request = generative_service.GenerateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator( + response, generative_service.GenerateContentResponse + ) + resp = self._interceptor.post_stream_generate_content(resp) + return resp + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + generative_service.BatchEmbedContentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchEmbedContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], generative_service.CountTokensResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + generative_service.EmbedContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EmbedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + generative_service.GenerateAnswerResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GenerateAnswer(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamGenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GenerativeServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/__init__.py new file mode 100644 index 000000000000..5738b8bf4239 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ModelServiceAsyncClient +from .client import ModelServiceClient + +__all__ = ( + "ModelServiceClient", + "ModelServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py new file mode 100644 index 000000000000..d82ec5481b16 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/async_client.py @@ -0,0 +1,1101 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.model_service import pagers +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from google.ai.generativelanguage_v1beta.types import tuned_model + +from .client import ModelServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport + + +class ModelServiceAsyncClient: + """Provides methods for getting metadata information about + Generative Models. 
+ """ + + _client: ModelServiceClient + + DEFAULT_ENDPOINT = ModelServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ModelServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(ModelServiceClient.model_path) + parse_model_path = staticmethod(ModelServiceClient.parse_model_path) + tuned_model_path = staticmethod(ModelServiceClient.tuned_model_path) + parse_tuned_model_path = staticmethod(ModelServiceClient.parse_tuned_model_path) + common_billing_account_path = staticmethod( + ModelServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ModelServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ModelServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(ModelServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(ModelServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + ModelServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ModelServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ModelServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ModelServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ModelServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. 
+ """ + return ModelServiceClient.from_service_account_info.__func__(ModelServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_file.__func__(ModelServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ModelServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ModelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ModelServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ModelServiceClient).get_transport_class, type(ModelServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ModelServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ModelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ModelServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Gets information about a specific Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetModelRequest, dict]]): + The request object. Request for getting information about + a specific Model. + name (:class:`str`): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.GetModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_model, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsAsyncPager: + r"""Lists models available through the API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListModelsRequest, dict]]): + The request object. Request for listing all Models. + page_size (:class:`int`): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at + most 50 models will be returned per page. This method + returns at most 1000 models per page, even if you pass a + larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (:class:`str`): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.services.model_service.pagers.ListModelsAsyncPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.ListModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_models, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListModelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_tuned_model( + self, + request: Optional[Union[model_service.GetTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tuned_model.TunedModel: + r"""Gets information about a specific TunedModel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetTunedModelRequest, dict]]): + The request object. Request for getting information about + a specific Model. + name (:class:`str`): + Required. The resource name of the model. + + Format: ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.GetTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_tuned_model, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tuned_models( + self, + request: Optional[Union[model_service.ListTunedModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTunedModelsAsyncPager: + r"""Lists tuned models owned by the user. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListTunedModelsRequest, dict]]): + The request object. Request for listing TunedModels. + page_size (:class:`int`): + Optional. The maximum number of ``TunedModels`` to + return (per page). The service may return fewer tuned + models. + + If unspecified, at most 10 tuned models will be + returned. This method returns at most 1000 models per + page, even if you pass a larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (:class:`str`): + Optional. A page token, received from a previous + ``ListTunedModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListTunedModels`` must match the call that provided + the page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.model_service.pagers.ListTunedModelsAsyncPager: + Response from ListTunedModels containing a paginated + list of Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.ListTunedModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tuned_models, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListTunedModelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_tuned_model( + self, + request: Optional[Union[model_service.CreateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + tuned_model_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a tuned model. Intermediate tuning progress (if any) is + accessed through the [google.longrunning.Operations] service. + + Status and results can be accessed through the Operations + service. Example: GET + /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.CreateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CreateTunedModelRequest, dict]]): + The request object. Request to create a TunedModel. + tuned_model (:class:`google.ai.generativelanguage_v1beta.types.TunedModel`): + Required. The tuned model to create. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tuned_model_id (:class:`str`): + Optional. The unique id for the tuned model if + specified. This value should be up to 40 characters, the + first character must be a letter, the last could be a + letter or a number. The id must match the regular + expression: `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + + This corresponds to the ``tuned_model_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.ai.generativelanguage_v1beta.types.TunedModel` + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, tuned_model_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.CreateTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if tuned_model_id is not None: + request.tuned_model_id = tuned_model_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tuned_model, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gag_tuned_model.TunedModel, + metadata_type=model_service.CreateTunedModelMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_tuned_model( + self, + request: Optional[Union[model_service.UpdateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_tuned_model.TunedModel: + r"""Updates a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.UpdateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + response = await client.update_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.UpdateTunedModelRequest, dict]]): + The request object. Request to update a TunedModel. + tuned_model (:class:`google.ai.generativelanguage_v1beta.types.TunedModel`): + Required. The tuned model to update. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.UpdateTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tuned_model, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tuned_model.name", request.tuned_model.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tuned_model( + self, + request: Optional[Union[model_service.DeleteTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + await client.delete_tuned_model(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.DeleteTunedModelRequest, dict]]): + The request object. Request to delete a TunedModel. + name (:class:`str`): + Required. The resource name of the model. Format: + ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = model_service.DeleteTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tuned_model, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ModelServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py new file mode 100644 index 000000000000..f98851c12c22 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py @@ -0,0 +1,1282 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.model_service import pagers +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from google.ai.generativelanguage_v1beta.types import tuned_model + +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc import ModelServiceGrpcTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .transports.rest import ModelServiceRestTransport + + +class ModelServiceClientMeta(type): + """Metaclass for the ModelService client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] + _transport_registry["grpc"] = ModelServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ModelServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ModelServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ModelServiceClient(metaclass=ModelServiceClientMeta): + """Provides methods for getting metadata information about + Generative Models. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ModelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ModelServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def tuned_model_path( + tuned_model: str, + ) -> str: + """Returns a fully-qualified tuned_model string.""" + return "tunedModels/{tuned_model}".format( + tuned_model=tuned_model, + ) + + @staticmethod + def parse_tuned_model_path(path: str) -> Dict[str, str]: + """Parses a tuned_model path into its component segments.""" + m = re.match(r"^tunedModels/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, 
str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ModelServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ModelServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ModelServiceTransport): + # transport is a ModelServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Gets information about a specific Model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetModelRequest, dict]): + The request object. 
Request for getting information about + a specific Model. + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.GetModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.GetModelRequest): + request = model_service.GetModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListModelsPager: + r"""Lists models available through the API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListModelsRequest, dict]): + The request object. Request for listing all Models. + page_size (int): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at + most 50 models will be returned per page. This method + returns at most 1000 models per page, even if you pass a + larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ page_token (str): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.model_service.pagers.ListModelsPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.ListModelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.ListModelsRequest): + request = model_service.ListModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_models] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListModelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tuned_model( + self, + request: Optional[Union[model_service.GetTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tuned_model.TunedModel: + r"""Gets information about a specific TunedModel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetTunedModelRequest, dict]): + The request object. Request for getting information about + a specific Model. + name (str): + Required. The resource name of the model. + + Format: ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.GetTunedModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, model_service.GetTunedModelRequest): + request = model_service.GetTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_tuned_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tuned_models( + self, + request: Optional[Union[model_service.ListTunedModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTunedModelsPager: + r"""Lists tuned models owned by the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListTunedModelsRequest, dict]): + The request object. Request for listing TunedModels. + page_size (int): + Optional. The maximum number of ``TunedModels`` to + return (per page). The service may return fewer tuned + models. + + If unspecified, at most 10 tuned models will be + returned. This method returns at most 1000 models per + page, even if you pass a larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (str): + Optional. A page token, received from a previous + ``ListTunedModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListTunedModels`` must match the call that provided + the page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.services.model_service.pagers.ListTunedModelsPager: + Response from ListTunedModels containing a paginated + list of Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.ListTunedModelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.ListTunedModelsRequest): + request = model_service.ListTunedModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tuned_models] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTunedModelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_tuned_model( + self, + request: Optional[Union[model_service.CreateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + tuned_model_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a tuned model. Intermediate tuning progress (if any) is + accessed through the [google.longrunning.Operations] service. + + Status and results can be accessed through the Operations + service. Example: GET + /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.CreateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CreateTunedModelRequest, dict]): + The request object. 
Request to create a TunedModel. + tuned_model (google.ai.generativelanguage_v1beta.types.TunedModel): + Required. The tuned model to create. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tuned_model_id (str): + Optional. The unique id for the tuned model if + specified. This value should be up to 40 characters, the + first character must be a letter, the last could be a + letter or a number. The id must match the regular + expression: `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + + This corresponds to the ``tuned_model_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.ai.generativelanguage_v1beta.types.TunedModel` + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, tuned_model_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.CreateTunedModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, model_service.CreateTunedModelRequest): + request = model_service.CreateTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if tuned_model_id is not None: + request.tuned_model_id = tuned_model_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tuned_model] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gag_tuned_model.TunedModel, + metadata_type=model_service.CreateTunedModelMetadata, + ) + + # Done; return the response. + return response + + def update_tuned_model( + self, + request: Optional[Union[model_service.UpdateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_tuned_model.TunedModel: + r"""Updates a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.UpdateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + response = client.update_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.UpdateTunedModelRequest, dict]): + The request object. Request to update a TunedModel. + tuned_model (google.ai.generativelanguage_v1beta.types.TunedModel): + Required. The tuned model to update. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.UpdateTunedModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, model_service.UpdateTunedModelRequest): + request = model_service.UpdateTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tuned_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tuned_model.name", request.tuned_model.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tuned_model( + self, + request: Optional[Union[model_service.DeleteTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + client.delete_tuned_model(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.DeleteTunedModelRequest, dict]): + The request object. Request to delete a TunedModel. + name (str): + Required. The resource name of the model. Format: + ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a model_service.DeleteTunedModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, model_service.DeleteTunedModelRequest): + request = model_service.DeleteTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tuned_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "ModelServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/pagers.py new file mode 100644 index 000000000000..46fe5efdaf33 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.ai.generativelanguage_v1beta.types import model, model_service, tuned_model + + +class ListModelsPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., model_service.ListModelsResponse], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListModelsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[model.Model]: + for page in self.pages: + yield from page.models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListModelsAsyncPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[model_service.ListModelsResponse]], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.ai.generativelanguage_v1beta.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[model.Model]: + async def async_generator(): + async for page in self.pages: + for response in page.models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTunedModelsPager: + """A pager for iterating through ``list_tuned_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListTunedModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tuned_models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTunedModels`` requests and continue to iterate + through the ``tuned_models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListTunedModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., model_service.ListTunedModelsResponse], + request: model_service.ListTunedModelsRequest, + response: model_service.ListTunedModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListTunedModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListTunedModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = model_service.ListTunedModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[model_service.ListTunedModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tuned_model.TunedModel]: + for page in self.pages: + yield from page.tuned_models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTunedModelsAsyncPager: + """A pager for iterating through ``list_tuned_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListTunedModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tuned_models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTunedModels`` requests and continue to iterate + through the ``tuned_models`` field on the + corresponding responses. 
+ + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListTunedModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[model_service.ListTunedModelsResponse]], + request: model_service.ListTunedModelsRequest, + response: model_service.ListTunedModelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListTunedModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListTunedModelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = model_service.ListTunedModelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[model_service.ListTunedModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[tuned_model.TunedModel]: + async def async_generator(): + async for page in self.pages: + for response in page.tuned_models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/__init__.py 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/__init__.py new file mode 100644 index 000000000000..1b430a25489e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ModelServiceTransport +from .grpc import ModelServiceGrpcTransport +from .grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .rest import ModelServiceRestInterceptor, ModelServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] +_transport_registry["grpc"] = ModelServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ModelServiceRestTransport + +__all__ = ( + "ModelServiceTransport", + "ModelServiceGrpcTransport", + "ModelServiceGrpcAsyncIOTransport", + "ModelServiceRestTransport", + "ModelServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/base.py new file mode 100644 index 000000000000..1e6ab506236e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/base.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from google.ai.generativelanguage_v1beta.types import tuned_model + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ModelServiceTransport(abc.ABC): + """Abstract transport class for ModelService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_model: gapic_v1.method.wrap_method( + self.get_model, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_models: gapic_v1.method.wrap_method( + self.list_models, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_tuned_model: gapic_v1.method.wrap_method( + self.get_tuned_model, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_tuned_models: gapic_v1.method.wrap_method( + self.list_tuned_models, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_tuned_model: gapic_v1.method.wrap_method( + self.create_tuned_model, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_tuned_model: gapic_v1.method.wrap_method( + self.update_tuned_model, + default_retry=retries.Retry( + initial=1.0, + 
maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_tuned_model: gapic_v1.method.wrap_method( + self.delete_tuned_model, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_model( + self, + ) -> Callable[ + [model_service.GetModelRequest], Union[model.Model, Awaitable[model.Model]] + ]: + raise NotImplementedError() + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], + Union[ + model_service.ListModelsResponse, + Awaitable[model_service.ListModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_tuned_model( + self, + ) -> Callable[ + [model_service.GetTunedModelRequest], + Union[tuned_model.TunedModel, Awaitable[tuned_model.TunedModel]], + ]: + raise NotImplementedError() + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], + Union[ + model_service.ListTunedModelsResponse, + Awaitable[model_service.ListTunedModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_tuned_model( + self, + ) -> Callable[ + [model_service.CreateTunedModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def 
update_tuned_model( + self, + ) -> Callable[ + [model_service.UpdateTunedModelRequest], + Union[gag_tuned_model.TunedModel, Awaitable[gag_tuned_model.TunedModel]], + ]: + raise NotImplementedError() + + @property + def delete_tuned_model( + self, + ) -> Callable[ + [model_service.DeleteTunedModelRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ModelServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py new file mode 100644 index 000000000000..a6cb0699799a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from google.ai.generativelanguage_v1beta.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport + + +class ModelServiceGrpcTransport(ModelServiceTransport): + """gRPC backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific Model. + + Returns: + Callable[[~.GetModelRequest], + ~.Model]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + r"""Return a callable for the list models method over gRPC. + + Lists models available through the API. 
+ + Returns: + Callable[[~.ListModelsRequest], + ~.ListModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + @property + def get_tuned_model( + self, + ) -> Callable[[model_service.GetTunedModelRequest], tuned_model.TunedModel]: + r"""Return a callable for the get tuned model method over gRPC. + + Gets information about a specific TunedModel. + + Returns: + Callable[[~.GetTunedModelRequest], + ~.TunedModel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_tuned_model" not in self._stubs: + self._stubs["get_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/GetTunedModel", + request_serializer=model_service.GetTunedModelRequest.serialize, + response_deserializer=tuned_model.TunedModel.deserialize, + ) + return self._stubs["get_tuned_model"] + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], model_service.ListTunedModelsResponse + ]: + r"""Return a callable for the list tuned models method over gRPC. + + Lists tuned models owned by the user. 
+ + Returns: + Callable[[~.ListTunedModelsRequest], + ~.ListTunedModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tuned_models" not in self._stubs: + self._stubs["list_tuned_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/ListTunedModels", + request_serializer=model_service.ListTunedModelsRequest.serialize, + response_deserializer=model_service.ListTunedModelsResponse.deserialize, + ) + return self._stubs["list_tuned_models"] + + @property + def create_tuned_model( + self, + ) -> Callable[[model_service.CreateTunedModelRequest], operations_pb2.Operation]: + r"""Return a callable for the create tuned model method over gRPC. + + Creates a tuned model. Intermediate tuning progress (if any) is + accessed through the [google.longrunning.Operations] service. + + Status and results can be accessed through the Operations + service. Example: GET + /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + Returns: + Callable[[~.CreateTunedModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_tuned_model" not in self._stubs: + self._stubs["create_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/CreateTunedModel", + request_serializer=model_service.CreateTunedModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_tuned_model"] + + @property + def update_tuned_model( + self, + ) -> Callable[[model_service.UpdateTunedModelRequest], gag_tuned_model.TunedModel]: + r"""Return a callable for the update tuned model method over gRPC. + + Updates a tuned model. + + Returns: + Callable[[~.UpdateTunedModelRequest], + ~.TunedModel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_tuned_model" not in self._stubs: + self._stubs["update_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/UpdateTunedModel", + request_serializer=model_service.UpdateTunedModelRequest.serialize, + response_deserializer=gag_tuned_model.TunedModel.deserialize, + ) + return self._stubs["update_tuned_model"] + + @property + def delete_tuned_model( + self, + ) -> Callable[[model_service.DeleteTunedModelRequest], empty_pb2.Empty]: + r"""Return a callable for the delete tuned model method over gRPC. + + Deletes a tuned model. + + Returns: + Callable[[~.DeleteTunedModelRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_tuned_model" not in self._stubs: + self._stubs["delete_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/DeleteTunedModel", + request_serializer=model_service.DeleteTunedModelRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_tuned_model"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ModelServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f8e65fe0fb83 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/grpc_asyncio.py @@ -0,0 +1,458 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from google.ai.generativelanguage_v1beta.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .grpc import ModelServiceGrpcTransport + + +class ModelServiceGrpcAsyncIOTransport(ModelServiceTransport): + """gRPC AsyncIO backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_model( + self, + ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific Model. + + Returns: + Callable[[~.GetModelRequest], + Awaitable[~.Model]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], Awaitable[model_service.ListModelsResponse] + ]: + r"""Return a callable for the list models method over gRPC. + + Lists models available through the API. + + Returns: + Callable[[~.ListModelsRequest], + Awaitable[~.ListModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_models" not in self._stubs: + self._stubs["list_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + @property + def get_tuned_model( + self, + ) -> Callable[ + [model_service.GetTunedModelRequest], Awaitable[tuned_model.TunedModel] + ]: + r"""Return a callable for the get tuned model method over gRPC. + + Gets information about a specific TunedModel. + + Returns: + Callable[[~.GetTunedModelRequest], + Awaitable[~.TunedModel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_tuned_model" not in self._stubs: + self._stubs["get_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/GetTunedModel", + request_serializer=model_service.GetTunedModelRequest.serialize, + response_deserializer=tuned_model.TunedModel.deserialize, + ) + return self._stubs["get_tuned_model"] + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], + Awaitable[model_service.ListTunedModelsResponse], + ]: + r"""Return a callable for the list tuned models method over gRPC. + + Lists tuned models owned by the user. + + Returns: + Callable[[~.ListTunedModelsRequest], + Awaitable[~.ListTunedModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_tuned_models" not in self._stubs: + self._stubs["list_tuned_models"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/ListTunedModels", + request_serializer=model_service.ListTunedModelsRequest.serialize, + response_deserializer=model_service.ListTunedModelsResponse.deserialize, + ) + return self._stubs["list_tuned_models"] + + @property + def create_tuned_model( + self, + ) -> Callable[ + [model_service.CreateTunedModelRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create tuned model method over gRPC. + + Creates a tuned model. Intermediate tuning progress (if any) is + accessed through the [google.longrunning.Operations] service. + + Status and results can be accessed through the Operations + service. Example: GET + /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + Returns: + Callable[[~.CreateTunedModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_tuned_model" not in self._stubs: + self._stubs["create_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/CreateTunedModel", + request_serializer=model_service.CreateTunedModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_tuned_model"] + + @property + def update_tuned_model( + self, + ) -> Callable[ + [model_service.UpdateTunedModelRequest], Awaitable[gag_tuned_model.TunedModel] + ]: + r"""Return a callable for the update tuned model method over gRPC. + + Updates a tuned model. + + Returns: + Callable[[~.UpdateTunedModelRequest], + Awaitable[~.TunedModel]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_tuned_model" not in self._stubs: + self._stubs["update_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/UpdateTunedModel", + request_serializer=model_service.UpdateTunedModelRequest.serialize, + response_deserializer=gag_tuned_model.TunedModel.deserialize, + ) + return self._stubs["update_tuned_model"] + + @property + def delete_tuned_model( + self, + ) -> Callable[[model_service.DeleteTunedModelRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tuned model method over gRPC. + + Deletes a tuned model. + + Returns: + Callable[[~.DeleteTunedModelRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_tuned_model" not in self._stubs: + self._stubs["delete_tuned_model"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.ModelService/DeleteTunedModel", + request_serializer=model_service.DeleteTunedModelRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_tuned_model"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ModelServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py new file mode 100644 index 000000000000..73c0afe522ff --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py @@ -0,0 +1,1082 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from google.ai.generativelanguage_v1beta.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ModelServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ModelServiceRestInterceptor: + """Interceptor for ModelService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ModelServiceRestTransport. + + .. code-block:: python + class MyCustomModelServiceInterceptor(ModelServiceRestInterceptor): + def pre_create_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_tuned_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_tuned_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tuned_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tuned_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_tuned_model(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
ModelServiceRestTransport(interceptor=MyCustomModelServiceInterceptor()) + client = ModelServiceClient(transport=transport) + + + """ + + def pre_create_tuned_model( + self, + request: model_service.CreateTunedModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.CreateTunedModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_create_tuned_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_tuned_model + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_delete_tuned_model( + self, + request: model_service.DeleteTunedModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.DeleteTunedModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def pre_get_model( + self, + request: model_service.GetModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.GetModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_model(self, response: model.Model) -> model.Model: + """Post-rpc interceptor for get_model + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_tuned_model( + self, + request: model_service.GetTunedModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.GetTunedModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_tuned_model( + self, response: tuned_model.TunedModel + ) -> tuned_model.TunedModel: + """Post-rpc interceptor for get_tuned_model + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_list_models( + self, + request: model_service.ListModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.ListModelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_list_models( + self, response: model_service.ListModelsResponse + ) -> model_service.ListModelsResponse: + """Post-rpc interceptor for list_models + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_list_tuned_models( + self, + request: model_service.ListTunedModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.ListTunedModelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tuned_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. 
+ """ + return request, metadata + + def post_list_tuned_models( + self, response: model_service.ListTunedModelsResponse + ) -> model_service.ListTunedModelsResponse: + """Post-rpc interceptor for list_tuned_models + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_update_tuned_model( + self, + request: model_service.UpdateTunedModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[model_service.UpdateTunedModelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_update_tuned_model( + self, response: gag_tuned_model.TunedModel + ) -> gag_tuned_model.TunedModel: + """Post-rpc interceptor for update_tuned_model + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ModelServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ModelServiceRestInterceptor + + +class ModelServiceRestTransport(ModelServiceTransport): + """REST backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ModelServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ModelServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = {} + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateTunedModel(ModelServiceRestStub): + def __hash__(self): + return hash("CreateTunedModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.CreateTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create tuned model method over HTTP. + + Args: + request (~.model_service.CreateTunedModelRequest): + The request object. Request to create a TunedModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/tunedModels", + "body": "tuned_model", + }, + ] + request, metadata = self._interceptor.pre_create_tuned_model( + request, metadata + ) + pb_request = model_service.CreateTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_tuned_model(resp) + return resp + + class _DeleteTunedModel(ModelServiceRestStub): + def __hash__(self): + return hash("DeleteTunedModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.DeleteTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete tuned model method over HTTP. + + Args: + request (~.model_service.DeleteTunedModelRequest): + The request object. Request to delete a TunedModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=tunedModels/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_tuned_model( + request, metadata + ) + pb_request = model_service.DeleteTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetModel(ModelServiceRestStub): + def __hash__(self): + return hash("GetModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.GetModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model.Model: + r"""Call the get model method over HTTP. + + Args: + request (~.model_service.GetModelRequest): + The request object. Request for getting information about + a specific Model. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model.Model: + Information about a Generative + Language Model. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=models/*}", + }, + ] + request, metadata = self._interceptor.pre_get_model(request, metadata) + pb_request = model_service.GetModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model.Model() + pb_resp = model.Model.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_model(resp) + return resp + + class _GetTunedModel(ModelServiceRestStub): + def __hash__(self): + return hash("GetTunedModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.GetTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tuned_model.TunedModel: + r"""Call the get tuned model method over HTTP. + + Args: + request (~.model_service.GetTunedModelRequest): + The request object. Request for getting information about + a specific Model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.tuned_model.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=tunedModels/*}", + }, + ] + request, metadata = self._interceptor.pre_get_tuned_model(request, metadata) + pb_request = model_service.GetTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tuned_model.TunedModel() + pb_resp = tuned_model.TunedModel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_tuned_model(resp) + return resp + + class _ListModels(ModelServiceRestStub): + def __hash__(self): + return hash("ListModels") + + def __call__( + self, + request: model_service.ListModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model_service.ListModelsResponse: + r"""Call the list models method over HTTP. + + Args: + request (~.model_service.ListModelsRequest): + The request object. Request for listing all Models. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model_service.ListModelsResponse: + Response from ``ListModel`` containing a paginated list + of Models. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/models", + }, + ] + request, metadata = self._interceptor.pre_list_models(request, metadata) + pb_request = model_service.ListModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model_service.ListModelsResponse() + pb_resp = model_service.ListModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_models(resp) + return resp + + class _ListTunedModels(ModelServiceRestStub): + def __hash__(self): + return hash("ListTunedModels") + + def __call__( + self, + request: model_service.ListTunedModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> model_service.ListTunedModelsResponse: + r"""Call the list tuned models method over HTTP. + + Args: + request (~.model_service.ListTunedModelsRequest): + The request object. Request for listing TunedModels. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.model_service.ListTunedModelsResponse: + Response from ``ListTunedModels`` containing a paginated + list of Models. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/tunedModels", + }, + ] + request, metadata = self._interceptor.pre_list_tuned_models( + request, metadata + ) + pb_request = model_service.ListTunedModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model_service.ListTunedModelsResponse() + pb_resp = model_service.ListTunedModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tuned_models(resp) + return resp + + class _UpdateTunedModel(ModelServiceRestStub): + def __hash__(self): + return hash("UpdateTunedModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: model_service.UpdateTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_tuned_model.TunedModel: + r"""Call the update tuned model method over HTTP. + + Args: + request (~.model_service.UpdateTunedModelRequest): + The request object. Request to update a TunedModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gag_tuned_model.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{tuned_model.name=tunedModels/*}", + "body": "tuned_model", + }, + ] + request, metadata = self._interceptor.pre_update_tuned_model( + request, metadata + ) + pb_request = model_service.UpdateTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_tuned_model.TunedModel() + pb_resp = gag_tuned_model.TunedModel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_tuned_model(resp) + return resp + + @property + def create_tuned_model( + self, + ) -> Callable[[model_service.CreateTunedModelRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_tuned_model( + self, + ) -> Callable[[model_service.DeleteTunedModelRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_tuned_model( + self, + ) -> Callable[[model_service.GetTunedModelRequest], tuned_model.TunedModel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], model_service.ListTunedModelsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListTunedModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_tuned_model( + self, + ) -> Callable[[model_service.UpdateTunedModelRequest], gag_tuned_model.TunedModel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ModelServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/__init__.py new file mode 100644 index 000000000000..7cd02e1fc232 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PermissionServiceAsyncClient +from .client import PermissionServiceClient + +__all__ = ( + "PermissionServiceClient", + "PermissionServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py new file mode 100644 index 000000000000..45bf05885ea0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/async_client.py @@ -0,0 +1,971 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.permission_service import pagers +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from google.ai.generativelanguage_v1beta.types import permission_service + +from .client import PermissionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PermissionServiceTransport +from .transports.grpc_asyncio import PermissionServiceGrpcAsyncIOTransport + + +class PermissionServiceAsyncClient: + """Provides methods for managing permissions to PaLM API + resources. 
+ """ + + _client: PermissionServiceClient + + DEFAULT_ENDPOINT = PermissionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PermissionServiceClient.DEFAULT_MTLS_ENDPOINT + + permission_path = staticmethod(PermissionServiceClient.permission_path) + parse_permission_path = staticmethod(PermissionServiceClient.parse_permission_path) + common_billing_account_path = staticmethod( + PermissionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PermissionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PermissionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PermissionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PermissionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PermissionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PermissionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PermissionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PermissionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PermissionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PermissionServiceAsyncClient: The constructed client. 
+ """ + return PermissionServiceClient.from_service_account_info.__func__(PermissionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PermissionServiceAsyncClient: The constructed client. + """ + return PermissionServiceClient.from_service_account_file.__func__(PermissionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PermissionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PermissionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PermissionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(PermissionServiceClient).get_transport_class, type(PermissionServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PermissionServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the permission service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PermissionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PermissionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_permission( + self, + request: Optional[ + Union[permission_service.CreatePermissionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + permission: Optional[gag_permission.Permission] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_permission.Permission: + r"""Create a permission to a specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_create_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CreatePermissionRequest, dict]]): + The request object. Request to create a ``Permission``. + parent (:class:`str`): + Required. The parent resource of the ``Permission``. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permission (:class:`google.ai.generativelanguage_v1beta.types.Permission`): + Required. The permission to create. + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. 
+ When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, permission]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = permission_service.CreatePermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if permission is not None: + request.permission = permission + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_permission, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_permission( + self, + request: Optional[Union[permission_service.GetPermissionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission.Permission: + r"""Gets information about a specific Permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetPermissionRequest, dict]]): + The request object. Request for getting information about a specific + ``Permission``. + name (:class:`str`): + Required. The resource name of the permission. + + Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = permission_service.GetPermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_permission, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_permissions( + self, + request: Optional[ + Union[permission_service.ListPermissionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPermissionsAsyncPager: + r"""Lists permissions for the specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_permissions(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListPermissionsRequest, dict]]): + The request object. Request for listing permissions. + parent (:class:`str`): + Required. The parent resource of the permissions. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.permission_service.pagers.ListPermissionsAsyncPager: + Response from ListPermissions containing a paginated list of + permissions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = permission_service.ListPermissionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPermissionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_permission( + self, + request: Optional[ + Union[permission_service.UpdatePermissionRequest, dict] + ] = None, + *, + permission: Optional[gag_permission.Permission] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_permission.Permission: + r"""Updates the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_update_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdatePermissionRequest( + ) + + # Make the request + response = await client.update_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.UpdatePermissionRequest, dict]]): + The request object. Request to update the ``Permission``. + permission (:class:`google.ai.generativelanguage_v1beta.types.Permission`): + Required. The permission to update. + + The permission's ``name`` field is used to identify the + permission to update. + + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Accepted ones: + + - role (``Permission.role`` field) + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. 
+ To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([permission, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = permission_service.UpdatePermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if permission is not None: + request.permission = permission + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_permission, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("permission.name", request.permission.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_permission( + self, + request: Optional[ + Union[permission_service.DeletePermissionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_delete_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + await client.delete_permission(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.DeletePermissionRequest, dict]]): + The request object. Request to delete the ``Permission``. + name (:class:`str`): + Required. The resource name of the permission. Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = permission_service.DeletePermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_permission, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def transfer_ownership( + self, + request: Optional[ + Union[permission_service.TransferOwnershipRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission_service.TransferOwnershipResponse: + r"""Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = await client.transfer_ownership(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.TransferOwnershipRequest, dict]]): + The request object. Request to transfer the ownership of + the tuned model. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.TransferOwnershipResponse: + Response from TransferOwnership. + """ + # Create or coerce a protobuf request object. + request = permission_service.TransferOwnershipRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.transfer_ownership, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PermissionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PermissionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py new file mode 100644 index 000000000000..ffe293de8c42 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py @@ -0,0 +1,1160 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.permission_service import pagers +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from google.ai.generativelanguage_v1beta.types import permission_service + +from .transports.base import DEFAULT_CLIENT_INFO, PermissionServiceTransport +from .transports.grpc import PermissionServiceGrpcTransport +from .transports.grpc_asyncio import PermissionServiceGrpcAsyncIOTransport +from .transports.rest import PermissionServiceRestTransport + + +class PermissionServiceClientMeta(type): + """Metaclass for the PermissionService client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PermissionServiceTransport]] + _transport_registry["grpc"] = PermissionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = PermissionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = PermissionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PermissionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PermissionServiceClient(metaclass=PermissionServiceClientMeta): + """Provides methods for managing permissions to PaLM API + resources. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PermissionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PermissionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PermissionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PermissionServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def permission_path( + tuned_model: str, + permission: str, + ) -> str: + """Returns a fully-qualified permission string.""" + return "tunedModels/{tuned_model}/permissions/{permission}".format( + tuned_model=tuned_model, + permission=permission, + ) + + @staticmethod + def parse_permission_path(path: str) -> Dict[str, str]: + """Parses a permission path into its component segments.""" + m = re.match( + r"^tunedModels/(?P.+?)/permissions/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + 
return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PermissionServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the permission service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PermissionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PermissionServiceTransport): + # transport is a PermissionServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_permission( + self, + request: Optional[ + Union[permission_service.CreatePermissionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + permission: Optional[gag_permission.Permission] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_permission.Permission: + r"""Create a permission to a specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_create_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CreatePermissionRequest, dict]): + The request object. Request to create a ``Permission``. + parent (str): + Required. The parent resource of the ``Permission``. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permission (google.ai.generativelanguage_v1beta.types.Permission): + Required. The permission to create. + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. 
+ + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, permission]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a permission_service.CreatePermissionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, permission_service.CreatePermissionRequest): + request = permission_service.CreatePermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if permission is not None: + request.permission = permission + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_permission( + self, + request: Optional[Union[permission_service.GetPermissionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission.Permission: + r"""Gets information about a specific Permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = client.get_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetPermissionRequest, dict]): + The request object. Request for getting information about a specific + ``Permission``. + name (str): + Required. The resource name of the permission. + + Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a permission_service.GetPermissionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, permission_service.GetPermissionRequest): + request = permission_service.GetPermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_permission] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_permissions( + self, + request: Optional[ + Union[permission_service.ListPermissionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPermissionsPager: + r"""Lists permissions for the specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_permissions(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListPermissionsRequest, dict]): + The request object. Request for listing permissions. + parent (str): + Required. The parent resource of the permissions. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.permission_service.pagers.ListPermissionsPager: + Response from ListPermissions containing a paginated list of + permissions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a permission_service.ListPermissionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, permission_service.ListPermissionsRequest): + request = permission_service.ListPermissionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPermissionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_permission( + self, + request: Optional[ + Union[permission_service.UpdatePermissionRequest, dict] + ] = None, + *, + permission: Optional[gag_permission.Permission] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_permission.Permission: + r"""Updates the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_update_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdatePermissionRequest( + ) + + # Make the request + response = client.update_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.UpdatePermissionRequest, dict]): + The request object. Request to update the ``Permission``. + permission (google.ai.generativelanguage_v1beta.types.Permission): + Required. The permission to update. 
+ + The permission's ``name`` field is used to identify the + permission to update. + + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Accepted ones: + + - role (``Permission.role`` field) + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([permission, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a permission_service.UpdatePermissionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, permission_service.UpdatePermissionRequest): + request = permission_service.UpdatePermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if permission is not None: + request.permission = permission + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("permission.name", request.permission.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_permission( + self, + request: Optional[ + Union[permission_service.DeletePermissionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_delete_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + client.delete_permission(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.DeletePermissionRequest, dict]): + The request object. Request to delete the ``Permission``. + name (str): + Required. The resource name of the permission. Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a permission_service.DeletePermissionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, permission_service.DeletePermissionRequest): + request = permission_service.DeletePermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def transfer_ownership( + self, + request: Optional[ + Union[permission_service.TransferOwnershipRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission_service.TransferOwnershipResponse: + r"""Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = client.transfer_ownership(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.TransferOwnershipRequest, dict]): + The request object. Request to transfer the ownership of + the tuned model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.TransferOwnershipResponse: + Response from TransferOwnership. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a permission_service.TransferOwnershipRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, permission_service.TransferOwnershipRequest): + request = permission_service.TransferOwnershipRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.transfer_ownership] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PermissionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PermissionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/pagers.py new file mode 100644 index 000000000000..f5c60ec9ea8d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.ai.generativelanguage_v1beta.types import permission, permission_service + + +class ListPermissionsPager: + """A pager for iterating through ``list_permissions`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListPermissionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``permissions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPermissions`` requests and continue to iterate + through the ``permissions`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListPermissionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., permission_service.ListPermissionsResponse], + request: permission_service.ListPermissionsRequest, + response: permission_service.ListPermissionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListPermissionsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListPermissionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = permission_service.ListPermissionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[permission_service.ListPermissionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[permission.Permission]: + for page in self.pages: + yield from page.permissions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPermissionsAsyncPager: + """A pager for iterating through ``list_permissions`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListPermissionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``permissions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPermissions`` requests and continue to iterate + through the ``permissions`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListPermissionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[permission_service.ListPermissionsResponse]], + request: permission_service.ListPermissionsRequest, + response: permission_service.ListPermissionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.ai.generativelanguage_v1beta.types.ListPermissionsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListPermissionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = permission_service.ListPermissionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[permission_service.ListPermissionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[permission.Permission]: + async def async_generator(): + async for page in self.pages: + for response in page.permissions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/__init__.py new file mode 100644 index 000000000000..fe33568492a6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PermissionServiceTransport +from .grpc import PermissionServiceGrpcTransport +from .grpc_asyncio import PermissionServiceGrpcAsyncIOTransport +from .rest import PermissionServiceRestInterceptor, PermissionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PermissionServiceTransport]] +_transport_registry["grpc"] = PermissionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PermissionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PermissionServiceRestTransport + +__all__ = ( + "PermissionServiceTransport", + "PermissionServiceGrpcTransport", + "PermissionServiceGrpcAsyncIOTransport", + "PermissionServiceRestTransport", + "PermissionServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/base.py new file mode 100644 index 000000000000..b0b11618fb9a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/base.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from google.ai.generativelanguage_v1beta.types import permission_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PermissionServiceTransport(abc.ABC): + """Abstract transport class for PermissionService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the 
transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_permission: gapic_v1.method.wrap_method( + self.create_permission, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_permission: gapic_v1.method.wrap_method( + self.get_permission, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_permissions: gapic_v1.method.wrap_method( + self.list_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update_permission: gapic_v1.method.wrap_method( + self.update_permission, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_permission: gapic_v1.method.wrap_method( + self.delete_permission, + default_retry=retries.Retry( + initial=1.0, 
+ maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.transfer_ownership: gapic_v1.method.wrap_method( + self.transfer_ownership, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], + Union[gag_permission.Permission, Awaitable[gag_permission.Permission]], + ]: + raise NotImplementedError() + + @property + def get_permission( + self, + ) -> Callable[ + [permission_service.GetPermissionRequest], + Union[permission.Permission, Awaitable[permission.Permission]], + ]: + raise NotImplementedError() + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + Union[ + permission_service.ListPermissionsResponse, + Awaitable[permission_service.ListPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], + Union[gag_permission.Permission, Awaitable[gag_permission.Permission]], + ]: + raise NotImplementedError() + + @property + def delete_permission( + self, + ) -> Callable[ + [permission_service.DeletePermissionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + 
Union[ + permission_service.TransferOwnershipResponse, + Awaitable[permission_service.TransferOwnershipResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PermissionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/grpc.py new file mode 100644 index 000000000000..6237599bb957 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/grpc.py @@ -0,0 +1,413 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from google.ai.generativelanguage_v1beta.types import permission_service + +from .base import DEFAULT_CLIENT_INFO, PermissionServiceTransport + + +class PermissionServiceGrpcTransport(PermissionServiceTransport): + """gRPC backend transport for PermissionService. + + Provides methods for managing permissions to PaLM API + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], gag_permission.Permission + ]: + r"""Return a callable for the create permission method over gRPC. + + Create a permission to a specific resource. + + Returns: + Callable[[~.CreatePermissionRequest], + ~.Permission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_permission" not in self._stubs: + self._stubs["create_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/CreatePermission", + request_serializer=permission_service.CreatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["create_permission"] + + @property + def get_permission( + self, + ) -> Callable[[permission_service.GetPermissionRequest], permission.Permission]: + r"""Return a callable for the get permission method over gRPC. + + Gets information about a specific Permission. + + Returns: + Callable[[~.GetPermissionRequest], + ~.Permission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_permission" not in self._stubs: + self._stubs["get_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/GetPermission", + request_serializer=permission_service.GetPermissionRequest.serialize, + response_deserializer=permission.Permission.deserialize, + ) + return self._stubs["get_permission"] + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + permission_service.ListPermissionsResponse, + ]: + r"""Return a callable for the list permissions method over gRPC. + + Lists permissions for the specific resource. + + Returns: + Callable[[~.ListPermissionsRequest], + ~.ListPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_permissions" not in self._stubs: + self._stubs["list_permissions"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/ListPermissions", + request_serializer=permission_service.ListPermissionsRequest.serialize, + response_deserializer=permission_service.ListPermissionsResponse.deserialize, + ) + return self._stubs["list_permissions"] + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], gag_permission.Permission + ]: + r"""Return a callable for the update permission method over gRPC. + + Updates the permission. + + Returns: + Callable[[~.UpdatePermissionRequest], + ~.Permission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_permission" not in self._stubs: + self._stubs["update_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/UpdatePermission", + request_serializer=permission_service.UpdatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["update_permission"] + + @property + def delete_permission( + self, + ) -> Callable[[permission_service.DeletePermissionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete permission method over gRPC. + + Deletes the permission. + + Returns: + Callable[[~.DeletePermissionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_permission" not in self._stubs: + self._stubs["delete_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/DeletePermission", + request_serializer=permission_service.DeletePermissionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_permission"] + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + permission_service.TransferOwnershipResponse, + ]: + r"""Return a callable for the transfer ownership method over gRPC. + + Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + Returns: + Callable[[~.TransferOwnershipRequest], + ~.TransferOwnershipResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "transfer_ownership" not in self._stubs: + self._stubs["transfer_ownership"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/TransferOwnership", + request_serializer=permission_service.TransferOwnershipRequest.serialize, + response_deserializer=permission_service.TransferOwnershipResponse.deserialize, + ) + return self._stubs["transfer_ownership"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PermissionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..50a0f11a537a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/grpc_asyncio.py @@ -0,0 +1,418 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from google.ai.generativelanguage_v1beta.types import permission_service + +from .base import DEFAULT_CLIENT_INFO, PermissionServiceTransport +from .grpc import PermissionServiceGrpcTransport + + +class PermissionServiceGrpcAsyncIOTransport(PermissionServiceTransport): + """gRPC AsyncIO backend transport for PermissionService. + + Provides methods for managing permissions to PaLM API + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], + Awaitable[gag_permission.Permission], + ]: + r"""Return a callable for the create permission method over gRPC. + + Create a permission to a specific resource. 
+ + Returns: + Callable[[~.CreatePermissionRequest], + Awaitable[~.Permission]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_permission" not in self._stubs: + self._stubs["create_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/CreatePermission", + request_serializer=permission_service.CreatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["create_permission"] + + @property + def get_permission( + self, + ) -> Callable[ + [permission_service.GetPermissionRequest], Awaitable[permission.Permission] + ]: + r"""Return a callable for the get permission method over gRPC. + + Gets information about a specific Permission. + + Returns: + Callable[[~.GetPermissionRequest], + Awaitable[~.Permission]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_permission" not in self._stubs: + self._stubs["get_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/GetPermission", + request_serializer=permission_service.GetPermissionRequest.serialize, + response_deserializer=permission.Permission.deserialize, + ) + return self._stubs["get_permission"] + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + Awaitable[permission_service.ListPermissionsResponse], + ]: + r"""Return a callable for the list permissions method over gRPC. + + Lists permissions for the specific resource. 
+ + Returns: + Callable[[~.ListPermissionsRequest], + Awaitable[~.ListPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_permissions" not in self._stubs: + self._stubs["list_permissions"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/ListPermissions", + request_serializer=permission_service.ListPermissionsRequest.serialize, + response_deserializer=permission_service.ListPermissionsResponse.deserialize, + ) + return self._stubs["list_permissions"] + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], + Awaitable[gag_permission.Permission], + ]: + r"""Return a callable for the update permission method over gRPC. + + Updates the permission. + + Returns: + Callable[[~.UpdatePermissionRequest], + Awaitable[~.Permission]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_permission" not in self._stubs: + self._stubs["update_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/UpdatePermission", + request_serializer=permission_service.UpdatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["update_permission"] + + @property + def delete_permission( + self, + ) -> Callable[ + [permission_service.DeletePermissionRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete permission method over gRPC. + + Deletes the permission. 
+ + Returns: + Callable[[~.DeletePermissionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_permission" not in self._stubs: + self._stubs["delete_permission"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/DeletePermission", + request_serializer=permission_service.DeletePermissionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_permission"] + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + Awaitable[permission_service.TransferOwnershipResponse], + ]: + r"""Return a callable for the transfer ownership method over gRPC. + + Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + Returns: + Callable[[~.TransferOwnershipRequest], + Awaitable[~.TransferOwnershipResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "transfer_ownership" not in self._stubs: + self._stubs["transfer_ownership"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PermissionService/TransferOwnership", + request_serializer=permission_service.TransferOwnershipRequest.serialize, + response_deserializer=permission_service.TransferOwnershipResponse.deserialize, + ) + return self._stubs["transfer_ownership"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PermissionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py new file mode 100644 index 000000000000..b449cba24111 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py @@ -0,0 +1,1050 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from google.ai.generativelanguage_v1beta.types import permission_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PermissionServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PermissionServiceRestInterceptor: + """Interceptor for PermissionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PermissionServiceRestTransport. 
+ + .. code-block:: python + class MyCustomPermissionServiceInterceptor(PermissionServiceRestInterceptor): + def pre_create_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_permission(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_permission(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_transfer_ownership(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_transfer_ownership(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_permission(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PermissionServiceRestTransport(interceptor=MyCustomPermissionServiceInterceptor()) + client = PermissionServiceClient(transport=transport) + + + """ + + def pre_create_permission( + self, + request: permission_service.CreatePermissionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[permission_service.CreatePermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. 
+ """ + return request, metadata + + def post_create_permission( + self, response: gag_permission.Permission + ) -> gag_permission.Permission: + """Post-rpc interceptor for create_permission + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. + """ + return response + + def pre_delete_permission( + self, + request: permission_service.DeletePermissionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[permission_service.DeletePermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def pre_get_permission( + self, + request: permission_service.GetPermissionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[permission_service.GetPermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_get_permission( + self, response: permission.Permission + ) -> permission.Permission: + """Post-rpc interceptor for get_permission + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. + """ + return response + + def pre_list_permissions( + self, + request: permission_service.ListPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[permission_service.ListPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. 
+ """ + return request, metadata + + def post_list_permissions( + self, response: permission_service.ListPermissionsResponse + ) -> permission_service.ListPermissionsResponse: + """Post-rpc interceptor for list_permissions + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. + """ + return response + + def pre_transfer_ownership( + self, + request: permission_service.TransferOwnershipRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[permission_service.TransferOwnershipRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for transfer_ownership + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_transfer_ownership( + self, response: permission_service.TransferOwnershipResponse + ) -> permission_service.TransferOwnershipResponse: + """Post-rpc interceptor for transfer_ownership + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. + """ + return response + + def pre_update_permission( + self, + request: permission_service.UpdatePermissionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[permission_service.UpdatePermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_update_permission( + self, response: gag_permission.Permission + ) -> gag_permission.Permission: + """Post-rpc interceptor for update_permission + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class PermissionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PermissionServiceRestInterceptor + + +class PermissionServiceRestTransport(PermissionServiceTransport): + """REST backend transport for PermissionService. + + Provides methods for managing permissions to PaLM API + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PermissionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PermissionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreatePermission(PermissionServiceRestStub): + def __hash__(self): + return hash("CreatePermission") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: permission_service.CreatePermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_permission.Permission: + r"""Call the create permission method over HTTP. + + Args: + request (~.permission_service.CreatePermissionRequest): + The request object. Request to create a ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gag_permission.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. 
+ tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=tunedModels/*}/permissions", + "body": "permission", + }, + { + "method": "post", + "uri": "/v1beta/{parent=corpora/*}/permissions", + "body": "permission", + }, + ] + request, metadata = self._interceptor.pre_create_permission( + request, metadata + ) + pb_request = permission_service.CreatePermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_permission.Permission() + pb_resp = gag_permission.Permission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_permission(resp) + return resp + + class _DeletePermission(PermissionServiceRestStub): + def __hash__(self): + return hash("DeletePermission") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: permission_service.DeletePermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete permission method over HTTP. + + Args: + request (~.permission_service.DeletePermissionRequest): + The request object. Request to delete the ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=tunedModels/*/permissions/*}", + }, + { + "method": "delete", + "uri": "/v1beta/{name=corpora/*/permissions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_permission( + request, metadata + ) + pb_request = permission_service.DeletePermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetPermission(PermissionServiceRestStub): + def __hash__(self): + return hash("GetPermission") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: permission_service.GetPermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission.Permission: + r"""Call the get permission method over HTTP. 
+ + Args: + request (~.permission_service.GetPermissionRequest): + The request object. Request for getting information about a specific + ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.permission.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. 
+ tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=tunedModels/*/permissions/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=corpora/*/permissions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_permission(request, metadata) + pb_request = permission_service.GetPermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = permission.Permission() + pb_resp = permission.Permission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_permission(resp) + return resp + + class _ListPermissions(PermissionServiceRestStub): + def __hash__(self): + return hash("ListPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: permission_service.ListPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission_service.ListPermissionsResponse: + r"""Call the list permissions method over HTTP. + + Args: + request (~.permission_service.ListPermissionsRequest): + The request object. Request for listing permissions. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.permission_service.ListPermissionsResponse: + Response from ``ListPermissions`` containing a paginated + list of permissions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=tunedModels/*}/permissions", + }, + { + "method": "get", + "uri": "/v1beta/{parent=corpora/*}/permissions", + }, + ] + request, metadata = self._interceptor.pre_list_permissions( + request, metadata + ) + pb_request = permission_service.ListPermissionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = permission_service.ListPermissionsResponse() + pb_resp = permission_service.ListPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_permissions(resp) + return resp + + class _TransferOwnership(PermissionServiceRestStub): + def __hash__(self): + return hash("TransferOwnership") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: permission_service.TransferOwnershipRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> permission_service.TransferOwnershipResponse: + r"""Call the transfer ownership method over HTTP. + + Args: + request (~.permission_service.TransferOwnershipRequest): + The request object. Request to transfer the ownership of + the tuned model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.permission_service.TransferOwnershipResponse: + Response from ``TransferOwnership``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=tunedModels/*}:transferOwnership", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_transfer_ownership( + request, metadata + ) + pb_request = permission_service.TransferOwnershipRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = permission_service.TransferOwnershipResponse() + pb_resp = permission_service.TransferOwnershipResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_transfer_ownership(resp) + return resp + + class _UpdatePermission(PermissionServiceRestStub): + def __hash__(self): + return hash("UpdatePermission") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: permission_service.UpdatePermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gag_permission.Permission: + r"""Call the update permission method over HTTP. + + Args: + request (~.permission_service.UpdatePermissionRequest): + The request object. Request to update the ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gag_permission.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. 
Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{permission.name=tunedModels/*/permissions/*}", + "body": "permission", + }, + { + "method": "patch", + "uri": "/v1beta/{permission.name=corpora/*/permissions/*}", + "body": "permission", + }, + ] + request, metadata = self._interceptor.pre_update_permission( + request, metadata + ) + pb_request = permission_service.UpdatePermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_permission.Permission() + pb_resp = gag_permission.Permission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_permission(resp) + return resp + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], gag_permission.Permission + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_permission( + self, + ) -> Callable[[permission_service.DeletePermissionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_permission( + self, + ) -> Callable[[permission_service.GetPermissionRequest], permission.Permission]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + permission_service.ListPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + permission_service.TransferOwnershipResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TransferOwnership(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], gag_permission.Permission + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PermissionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/__init__.py new file mode 100644 index 000000000000..57b35858559c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import RetrieverServiceAsyncClient +from .client import RetrieverServiceClient + +__all__ = ( + "RetrieverServiceClient", + "RetrieverServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py new file mode 100644 index 000000000000..74b278b12262 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/async_client.py @@ -0,0 +1,2333 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.retriever_service import pagers +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + +from .client import RetrieverServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport +from .transports.grpc_asyncio import RetrieverServiceGrpcAsyncIOTransport + + +class RetrieverServiceAsyncClient: + """An API for semantic search over a corpus of user uploaded + content. 
+ """ + + _client: RetrieverServiceClient + + DEFAULT_ENDPOINT = RetrieverServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RetrieverServiceClient.DEFAULT_MTLS_ENDPOINT + + chunk_path = staticmethod(RetrieverServiceClient.chunk_path) + parse_chunk_path = staticmethod(RetrieverServiceClient.parse_chunk_path) + corpus_path = staticmethod(RetrieverServiceClient.corpus_path) + parse_corpus_path = staticmethod(RetrieverServiceClient.parse_corpus_path) + document_path = staticmethod(RetrieverServiceClient.document_path) + parse_document_path = staticmethod(RetrieverServiceClient.parse_document_path) + common_billing_account_path = staticmethod( + RetrieverServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + RetrieverServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(RetrieverServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + RetrieverServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + RetrieverServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + RetrieverServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(RetrieverServiceClient.common_project_path) + parse_common_project_path = staticmethod( + RetrieverServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(RetrieverServiceClient.common_location_path) + parse_common_location_path = staticmethod( + RetrieverServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceAsyncClient: The constructed client. 
+ """ + return RetrieverServiceClient.from_service_account_info.__func__(RetrieverServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceAsyncClient: The constructed client. + """ + return RetrieverServiceClient.from_service_account_file.__func__(RetrieverServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RetrieverServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RetrieverServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RetrieverServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(RetrieverServiceClient).get_transport_class, type(RetrieverServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, RetrieverServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the retriever service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.RetrieverServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = RetrieverServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_corpus( + self, + request: Optional[Union[retriever_service.CreateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Creates an empty ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_create_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCorpusRequest( + ) + + # Make the request + response = await client.create_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CreateCorpusRequest, dict]]): + The request object. Request to create a ``Corpus``. + corpus (:class:`google.ai.generativelanguage_v1beta.types.Corpus`): + Required. The ``Corpus`` to create. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.CreateCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if corpus is not None: + request.corpus = corpus + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_corpus, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_corpus( + self, + request: Optional[Union[retriever_service.GetCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Gets information about a specific ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetCorpusRequest, dict]]): + The request object. 
Request for getting information about a specific + ``Corpus``. + name (:class:`str`): + Required. The name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.GetCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_corpus, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_corpus( + self, + request: Optional[Union[retriever_service.UpdateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Updates a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_update_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCorpusRequest( + ) + + # Make the request + response = await client.update_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.UpdateCorpusRequest, dict]]): + The request object. Request to update a ``Corpus``. + corpus (:class:`google.ai.generativelanguage_v1beta.types.Corpus`): + Required. The ``Corpus`` to update. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, this + only supports updating ``display_name``. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.UpdateCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_corpus, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("corpus.name", request.corpus.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_corpus( + self, + request: Optional[Union[retriever_service.DeleteCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + await client.delete_corpus(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.DeleteCorpusRequest, dict]]): + The request object. Request to delete a ``Corpus``. + name (:class:`str`): + Required. The resource name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.DeleteCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_corpus, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_corpora( + self, + request: Optional[Union[retriever_service.ListCorporaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCorporaAsyncPager: + r"""Lists all ``Corpora`` owned by the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_corpora(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListCorporaRequest, dict]]): + The request object. Request for listing ``Corpora``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListCorporaAsyncPager: + Response from ListCorpora containing a paginated list of Corpora. + The results are sorted by ascending + corpus.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = retriever_service.ListCorporaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_corpora, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCorporaAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def query_corpus( + self, + request: Optional[Union[retriever_service.QueryCorpusRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.QueryCorpusResponse: + r"""Performs semantic search over a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_query_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.QueryCorpusRequest, dict]]): + The request object. Request for querying a ``Corpus``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.QueryCorpusResponse: + Response from QueryCorpus containing a list of relevant + chunks. + + """ + # Create or coerce a protobuf request object. + request = retriever_service.QueryCorpusRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_corpus, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_document( + self, + request: Optional[Union[retriever_service.CreateDocumentRequest, dict]] = None, + *, + parent: Optional[str] = None, + document: Optional[retriever.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Creates an empty ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_create_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CreateDocumentRequest, dict]]): + The request object. Request to create a ``Document``. + parent (:class:`str`): + Required. The name of the ``Corpus`` where this + ``Document`` will be created. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + document (:class:`google.ai.generativelanguage_v1beta.types.Document`): + Required. The ``Document`` to create. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.CreateDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_document, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_document( + self, + request: Optional[Union[retriever_service.GetDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Gets information about a specific ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetDocumentRequest, dict]]): + The request object. Request for getting information about a specific + ``Document``. + name (:class:`str`): + Required. The name of the ``Document`` to retrieve. + Example: ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = retriever_service.GetDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_document, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_document( + self, + request: Optional[Union[retriever_service.UpdateDocumentRequest, dict]] = None, + *, + document: Optional[retriever.Document] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Updates a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_update_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.UpdateDocumentRequest, dict]]): + The request object. Request to update a ``Document``. + document (:class:`google.ai.generativelanguage_v1beta.types.Document`): + Required. The ``Document`` to update. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, this + only supports updating ``display_name`` and + ``custom_metadata``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([document, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.UpdateDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_document, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("document.name", request.document.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_document( + self, + request: Optional[Union[retriever_service.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_delete_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.DeleteDocumentRequest, dict]]): + The request object. Request to delete a ``Document``. + name (:class:`str`): + Required. The resource name of the ``Document`` to + delete. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.DeleteDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_document, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_documents( + self, + request: Optional[Union[retriever_service.ListDocumentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsAsyncPager: + r"""Lists all ``Document``\ s in a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_documents(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListDocumentsRequest, dict]]): + The request object. Request for listing ``Document``\ s. + parent (:class:`str`): + Required. The name of the ``Corpus`` containing + ``Document``\ s. Example: ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListDocumentsAsyncPager: + Response from ListDocuments containing a paginated list of Documents. + The Documents are sorted by ascending + document.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.ListDocumentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_documents, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDocumentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def query_document( + self, + request: Optional[Union[retriever_service.QueryDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.QueryDocumentResponse: + r"""Performs semantic search over a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_query_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.QueryDocumentRequest, dict]]): + The request object. Request for querying a ``Document``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.QueryDocumentResponse: + Response from QueryDocument containing a list of + relevant chunks. + + """ + # Create or coerce a protobuf request object. + request = retriever_service.QueryDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_document, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_chunk( + self, + request: Optional[Union[retriever_service.CreateChunkRequest, dict]] = None, + *, + parent: Optional[str] = None, + chunk: Optional[retriever.Chunk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Creates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_create_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = await client.create_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CreateChunkRequest, dict]]): + The request object. Request to create a ``Chunk``. + parent (:class:`str`): + Required. The name of the ``Document`` where this + ``Chunk`` will be created. 
Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + chunk (:class:`google.ai.generativelanguage_v1beta.types.Chunk`): + Required. The ``Chunk`` to create. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, chunk]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.CreateChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if chunk is not None: + request.chunk = chunk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_chunk, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_create_chunks( + self, + request: Optional[ + Union[retriever_service.BatchCreateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.BatchCreateChunksResponse: + r"""Batch create ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_create_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.BatchCreateChunksRequest, dict]]): + The request object. Request to batch create ``Chunk``\ s. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchCreateChunksResponse: + Response from BatchCreateChunks containing a list of + created Chunks. + + """ + # Create or coerce a protobuf request object. + request = retriever_service.BatchCreateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_chunks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_chunk( + self, + request: Optional[Union[retriever_service.GetChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Gets information about a specific ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_get_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GetChunkRequest, dict]]): + The request object. Request for getting information about a specific + ``Chunk``. + name (:class:`str`): + Required. The name of the ``Chunk`` to retrieve. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.GetChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_chunk, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_chunk( + self, + request: Optional[Union[retriever_service.UpdateChunkRequest, dict]] = None, + *, + chunk: Optional[retriever.Chunk] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Updates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_update_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = await client.update_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.UpdateChunkRequest, dict]]): + The request object. Request to update a ``Chunk``. + chunk (:class:`google.ai.generativelanguage_v1beta.types.Chunk`): + Required. The ``Chunk`` to update. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, this + only supports updating ``custom_metadata`` and ``data``. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([chunk, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.UpdateChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if chunk is not None: + request.chunk = chunk + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_chunk, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("chunk.name", request.chunk.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_update_chunks( + self, + request: Optional[ + Union[retriever_service.BatchUpdateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.BatchUpdateChunksResponse: + r"""Batch update ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_update_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.BatchUpdateChunksRequest, dict]]): + The request object. Request to batch update ``Chunk``\ s. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.BatchUpdateChunksResponse: + Response from BatchUpdateChunks containing a list of + updated Chunks. + + """ + # Create or coerce a protobuf request object. + request = retriever_service.BatchUpdateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_update_chunks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_chunk( + self, + request: Optional[Union[retriever_service.DeleteChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + await client.delete_chunk(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.DeleteChunkRequest, dict]]): + The request object. Request to delete a ``Chunk``. + name (:class:`str`): + Required. The resource name of the ``Chunk`` to delete. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.DeleteChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_chunk, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def batch_delete_chunks( + self, + request: Optional[ + Union[retriever_service.BatchDeleteChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Batch delete ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1beta.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + await client.batch_delete_chunks(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.BatchDeleteChunksRequest, dict]]): + The request object. Request to batch delete ``Chunk``\ s. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = retriever_service.BatchDeleteChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_delete_chunks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_chunks( + self, + request: Optional[Union[retriever_service.ListChunksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChunksAsyncPager: + r"""Lists all ``Chunk``\ s in a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_list_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.ListChunksRequest, dict]]): + The request object. Request for listing ``Chunk``\ s. + parent (:class:`str`): + Required. The name of the ``Document`` containing + ``Chunk``\ s. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListChunksAsyncPager: + Response from ListChunks containing a paginated list of Chunks. + The Chunks are sorted by ascending chunk.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = retriever_service.ListChunksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_chunks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChunksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "RetrieverServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RetrieverServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py new file mode 100644 index 000000000000..96ff948438c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py @@ -0,0 +1,2459 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.services.retriever_service import pagers +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + +from .transports.base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport +from .transports.grpc import RetrieverServiceGrpcTransport +from .transports.grpc_asyncio import RetrieverServiceGrpcAsyncIOTransport +from .transports.rest import RetrieverServiceRestTransport + + +class RetrieverServiceClientMeta(type): + """Metaclass for the RetrieverService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[RetrieverServiceTransport]] + _transport_registry["grpc"] = RetrieverServiceGrpcTransport + _transport_registry["grpc_asyncio"] = RetrieverServiceGrpcAsyncIOTransport + _transport_registry["rest"] = RetrieverServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RetrieverServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RetrieverServiceClient(metaclass=RetrieverServiceClientMeta): + """An API for semantic search over a corpus of user uploaded + content. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RetrieverServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RetrieverServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def chunk_path( + corpus: str, + document: str, + chunk: str, + ) -> str: + """Returns a fully-qualified chunk string.""" + return "corpora/{corpus}/documents/{document}/chunks/{chunk}".format( + corpus=corpus, + document=document, + chunk=chunk, + ) + + @staticmethod + def parse_chunk_path(path: str) -> Dict[str, str]: + """Parses a chunk path into its component segments.""" + m = re.match( + r"^corpora/(?P.+?)/documents/(?P.+?)/chunks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def corpus_path( + corpus: str, + ) -> str: + """Returns a fully-qualified corpus string.""" + return "corpora/{corpus}".format( + corpus=corpus, + ) + + @staticmethod + def parse_corpus_path(path: str) -> Dict[str, str]: + """Parses a corpus path into its component segments.""" + m = re.match(r"^corpora/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def document_path( + corpus: str, + document: str, + ) -> str: + """Returns a fully-qualified document string.""" + return "corpora/{corpus}/documents/{document}".format( + corpus=corpus, + document=document, + ) + + @staticmethod + def parse_document_path(path: str) -> Dict[str, str]: + """Parses a document path into its component segments.""" + m = re.match(r"^corpora/(?P.+?)/documents/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + 
return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RetrieverServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the retriever service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RetrieverServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RetrieverServiceTransport): + # transport is a RetrieverServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_corpus( + self, + request: Optional[Union[retriever_service.CreateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Creates an empty ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_create_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCorpusRequest( + ) + + # Make the request + response = client.create_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CreateCorpusRequest, dict]): + The request object. Request to create a ``Corpus``. + corpus (google.ai.generativelanguage_v1beta.types.Corpus): + Required. The ``Corpus`` to create. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.CreateCorpusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, retriever_service.CreateCorpusRequest): + request = retriever_service.CreateCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_corpus] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_corpus( + self, + request: Optional[Union[retriever_service.GetCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Gets information about a specific ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = client.get_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetCorpusRequest, dict]): + The request object. Request for getting information about a specific + ``Corpus``. + name (str): + Required. 
The name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.GetCorpusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.GetCorpusRequest): + request = retriever_service.GetCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_corpus( + self, + request: Optional[Union[retriever_service.UpdateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Updates a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_update_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCorpusRequest( + ) + + # Make the request + response = client.update_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.UpdateCorpusRequest, dict]): + The request object. Request to update a ``Corpus``. + corpus (google.ai.generativelanguage_v1beta.types.Corpus): + Required. The ``Corpus`` to update. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this + only supports updating ``display_name``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.UpdateCorpusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.UpdateCorpusRequest): + request = retriever_service.UpdateCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("corpus.name", request.corpus.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_corpus( + self, + request: Optional[Union[retriever_service.DeleteCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + client.delete_corpus(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.DeleteCorpusRequest, dict]): + The request object. Request to delete a ``Corpus``. + name (str): + Required. The resource name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.DeleteCorpusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.DeleteCorpusRequest): + request = retriever_service.DeleteCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_corpora( + self, + request: Optional[Union[retriever_service.ListCorporaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCorporaPager: + r"""Lists all ``Corpora`` owned by the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_corpora(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListCorporaRequest, dict]): + The request object. Request for listing ``Corpora``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListCorporaPager: + Response from ListCorpora containing a paginated list of Corpora. + The results are sorted by ascending + corpus.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.ListCorporaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.ListCorporaRequest): + request = retriever_service.ListCorporaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_corpora] + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCorporaPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def query_corpus( + self, + request: Optional[Union[retriever_service.QueryCorpusRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.QueryCorpusResponse: + r"""Performs semantic search over a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_query_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.QueryCorpusRequest, dict]): + The request object. Request for querying a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.QueryCorpusResponse: + Response from QueryCorpus containing a list of relevant + chunks. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.QueryCorpusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.QueryCorpusRequest): + request = retriever_service.QueryCorpusRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_document( + self, + request: Optional[Union[retriever_service.CreateDocumentRequest, dict]] = None, + *, + parent: Optional[str] = None, + document: Optional[retriever.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Creates an empty ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_create_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CreateDocumentRequest, dict]): + The request object. Request to create a ``Document``. + parent (str): + Required. The name of the ``Corpus`` where this + ``Document`` will be created. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + document (google.ai.generativelanguage_v1beta.types.Document): + Required. The ``Document`` to create. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.CreateDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.CreateDocumentRequest): + request = retriever_service.CreateDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_document( + self, + request: Optional[Union[retriever_service.GetDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Gets information about a specific ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetDocumentRequest, dict]): + The request object. Request for getting information about a specific + ``Document``. + name (str): + Required. The name of the ``Document`` to retrieve. + Example: ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.GetDocumentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.GetDocumentRequest): + request = retriever_service.GetDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_document( + self, + request: Optional[Union[retriever_service.UpdateDocumentRequest, dict]] = None, + *, + document: Optional[retriever.Document] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Updates a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_update_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.UpdateDocumentRequest, dict]): + The request object. Request to update a ``Document``. + document (google.ai.generativelanguage_v1beta.types.Document): + Required. The ``Document`` to update. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this + only supports updating ``display_name`` and + ``custom_metadata``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([document, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.UpdateDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.UpdateDocumentRequest): + request = retriever_service.UpdateDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("document.name", request.document.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_document( + self, + request: Optional[Union[retriever_service.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_delete_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.DeleteDocumentRequest, dict]): + The request object. Request to delete a ``Document``. + name (str): + Required. The resource name of the ``Document`` to + delete. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.DeleteDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, retriever_service.DeleteDocumentRequest): + request = retriever_service.DeleteDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_documents( + self, + request: Optional[Union[retriever_service.ListDocumentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsPager: + r"""Lists all ``Document``\ s in a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_documents(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListDocumentsRequest, dict]): + The request object. Request for listing ``Document``\ s. + parent (str): + Required. The name of the ``Corpus`` containing + ``Document``\ s. Example: ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListDocumentsPager: + Response from ListDocuments containing a paginated list of Documents. + The Documents are sorted by ascending + document.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.ListDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.ListDocumentsRequest): + request = retriever_service.ListDocumentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDocumentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def query_document( + self, + request: Optional[Union[retriever_service.QueryDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.QueryDocumentResponse: + r"""Performs semantic search over a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_query_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.QueryDocumentRequest, dict]): + The request object. Request for querying a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.QueryDocumentResponse: + Response from QueryDocument containing a list of + relevant chunks. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.QueryDocumentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.QueryDocumentRequest): + request = retriever_service.QueryDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_document] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_chunk( + self, + request: Optional[Union[retriever_service.CreateChunkRequest, dict]] = None, + *, + parent: Optional[str] = None, + chunk: Optional[retriever.Chunk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Creates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_create_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = client.create_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CreateChunkRequest, dict]): + The request object. Request to create a ``Chunk``. + parent (str): + Required. The name of the ``Document`` where this + ``Chunk`` will be created. 
Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + chunk (google.ai.generativelanguage_v1beta.types.Chunk): + Required. The ``Chunk`` to create. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, chunk]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.CreateChunkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.CreateChunkRequest): + request = retriever_service.CreateChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if chunk is not None: + request.chunk = chunk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_create_chunks( + self, + request: Optional[ + Union[retriever_service.BatchCreateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.BatchCreateChunksResponse: + r"""Batch create ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_create_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.BatchCreateChunksRequest, dict]): + The request object. Request to batch create ``Chunk``\ s. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchCreateChunksResponse: + Response from BatchCreateChunks containing a list of + created Chunks. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.BatchCreateChunksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.BatchCreateChunksRequest): + request = retriever_service.BatchCreateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_chunk( + self, + request: Optional[Union[retriever_service.GetChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Gets information about a specific ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_get_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = client.get_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GetChunkRequest, dict]): + The request object. Request for getting information about a specific + ``Chunk``. + name (str): + Required. The name of the ``Chunk`` to retrieve. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.GetChunkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.GetChunkRequest): + request = retriever_service.GetChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_chunk( + self, + request: Optional[Union[retriever_service.UpdateChunkRequest, dict]] = None, + *, + chunk: Optional[retriever.Chunk] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Updates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_update_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = client.update_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.UpdateChunkRequest, dict]): + The request object. Request to update a ``Chunk``. + chunk (google.ai.generativelanguage_v1beta.types.Chunk): + Required. The ``Chunk`` to update. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this + only supports updating ``custom_metadata`` and ``data``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([chunk, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.UpdateChunkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.UpdateChunkRequest): + request = retriever_service.UpdateChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if chunk is not None: + request.chunk = chunk + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("chunk.name", request.chunk.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_chunks( + self, + request: Optional[ + Union[retriever_service.BatchUpdateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.BatchUpdateChunksResponse: + r"""Batch update ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_update_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.BatchUpdateChunksRequest, dict]): + The request object. Request to batch update ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchUpdateChunksResponse: + Response from BatchUpdateChunks containing a list of + updated Chunks. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.BatchUpdateChunksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.BatchUpdateChunksRequest): + request = retriever_service.BatchUpdateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.batch_update_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_chunk( + self, + request: Optional[Union[retriever_service.DeleteChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + client.delete_chunk(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.DeleteChunkRequest, dict]): + The request object. Request to delete a ``Chunk``. + name (str): + Required. The resource name of the ``Chunk`` to delete. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.DeleteChunkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.DeleteChunkRequest): + request = retriever_service.DeleteChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_delete_chunks( + self, + request: Optional[ + Union[retriever_service.BatchDeleteChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Batch delete ``Chunk``\ s. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1beta.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + client.batch_delete_chunks(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.BatchDeleteChunksRequest, dict]): + The request object. Request to batch delete ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.BatchDeleteChunksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.BatchDeleteChunksRequest): + request = retriever_service.BatchDeleteChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_delete_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_chunks( + self, + request: Optional[Union[retriever_service.ListChunksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChunksPager: + r"""Lists all ``Chunk``\ s in a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_list_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.ListChunksRequest, dict]): + The request object. Request for listing ``Chunk``\ s. + parent (str): + Required. The name of the ``Document`` containing + ``Chunk``\ s. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListChunksPager: + Response from ListChunks containing a paginated list of Chunks. + The Chunks are sorted by ascending chunk.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a retriever_service.ListChunksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, retriever_service.ListChunksRequest): + request = retriever_service.ListChunksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListChunksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RetrieverServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RetrieverServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/pagers.py new file mode 100644 index 000000000000..98410f32afbf --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/pagers.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + + +class ListCorporaPager: + """A pager for iterating through ``list_corpora`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListCorporaResponse` object, and + provides an ``__iter__`` method to iterate through its + ``corpora`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCorpora`` requests and continue to iterate + through the ``corpora`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListCorporaResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., retriever_service.ListCorporaResponse], + request: retriever_service.ListCorporaRequest, + response: retriever_service.ListCorporaResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListCorporaRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListCorporaResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = retriever_service.ListCorporaRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[retriever_service.ListCorporaResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[retriever.Corpus]: + for page in self.pages: + yield from page.corpora + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCorporaAsyncPager: + """A pager for iterating through ``list_corpora`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListCorporaResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``corpora`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCorpora`` requests and continue to iterate + through the ``corpora`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListCorporaResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[retriever_service.ListCorporaResponse]], + request: retriever_service.ListCorporaRequest, + response: retriever_service.ListCorporaResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListCorporaRequest): + The initial request object. 
+ response (google.ai.generativelanguage_v1beta.types.ListCorporaResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = retriever_service.ListCorporaRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[retriever_service.ListCorporaResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[retriever.Corpus]: + async def async_generator(): + async for page in self.pages: + for response in page.corpora: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDocumentsPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListDocumentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., retriever_service.ListDocumentsResponse], + request: retriever_service.ListDocumentsRequest, + response: retriever_service.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListDocumentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = retriever_service.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[retriever_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[retriever.Document]: + for page in self.pages: + yield from page.documents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDocumentsAsyncPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListDocumentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. 
+ + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[retriever_service.ListDocumentsResponse]], + request: retriever_service.ListDocumentsRequest, + response: retriever_service.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListDocumentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = retriever_service.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[retriever_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[retriever.Document]: + async def async_generator(): + async for page in self.pages: + for response in page.documents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChunksPager: + """A pager for iterating through ``list_chunks`` requests. 
+ + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListChunksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``chunks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChunks`` requests and continue to iterate + through the ``chunks`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListChunksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., retriever_service.ListChunksResponse], + request: retriever_service.ListChunksRequest, + response: retriever_service.ListChunksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListChunksRequest): + The initial request object. + response (google.ai.generativelanguage_v1beta.types.ListChunksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = retriever_service.ListChunksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[retriever_service.ListChunksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[retriever.Chunk]: + for page in self.pages: + yield from page.chunks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChunksAsyncPager: + """A pager for iterating through ``list_chunks`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1beta.types.ListChunksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``chunks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListChunks`` requests and continue to iterate + through the ``chunks`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1beta.types.ListChunksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[retriever_service.ListChunksResponse]], + request: retriever_service.ListChunksRequest, + response: retriever_service.ListChunksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1beta.types.ListChunksRequest): + The initial request object. 
+ response (google.ai.generativelanguage_v1beta.types.ListChunksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = retriever_service.ListChunksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[retriever_service.ListChunksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[retriever.Chunk]: + async def async_generator(): + async for page in self.pages: + for response in page.chunks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/__init__.py new file mode 100644 index 000000000000..7f8233ec6de5 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RetrieverServiceTransport +from .grpc import RetrieverServiceGrpcTransport +from .grpc_asyncio import RetrieverServiceGrpcAsyncIOTransport +from .rest import RetrieverServiceRestInterceptor, RetrieverServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RetrieverServiceTransport]] +_transport_registry["grpc"] = RetrieverServiceGrpcTransport +_transport_registry["grpc_asyncio"] = RetrieverServiceGrpcAsyncIOTransport +_transport_registry["rest"] = RetrieverServiceRestTransport + +__all__ = ( + "RetrieverServiceTransport", + "RetrieverServiceGrpcTransport", + "RetrieverServiceGrpcAsyncIOTransport", + "RetrieverServiceRestTransport", + "RetrieverServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/base.py new file mode 100644 index 000000000000..8d0d0c05b46f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/base.py @@ -0,0 +1,588 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class RetrieverServiceTransport(abc.ABC): + """Abstract transport class for RetrieverService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_corpus: gapic_v1.method.wrap_method( + self.create_corpus, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_corpus: gapic_v1.method.wrap_method( + self.get_corpus, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_corpus: gapic_v1.method.wrap_method( + self.update_corpus, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_corpus: gapic_v1.method.wrap_method( + self.delete_corpus, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_corpora: gapic_v1.method.wrap_method( + self.list_corpora, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.query_corpus: gapic_v1.method.wrap_method( + self.query_corpus, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_document: gapic_v1.method.wrap_method( + self.create_document, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_document: gapic_v1.method.wrap_method( + self.get_document, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_document: gapic_v1.method.wrap_method( + self.update_document, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_document: gapic_v1.method.wrap_method( + self.delete_document, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_documents: gapic_v1.method.wrap_method( + self.list_documents, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + 
client_info=client_info, + ), + self.query_document: gapic_v1.method.wrap_method( + self.query_document, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_chunk: gapic_v1.method.wrap_method( + self.create_chunk, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_create_chunks: gapic_v1.method.wrap_method( + self.batch_create_chunks, + default_timeout=None, + client_info=client_info, + ), + self.get_chunk: gapic_v1.method.wrap_method( + self.get_chunk, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_chunk: gapic_v1.method.wrap_method( + self.update_chunk, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_update_chunks: gapic_v1.method.wrap_method( + self.batch_update_chunks, + default_timeout=None, + client_info=client_info, + ), + self.delete_chunk: gapic_v1.method.wrap_method( + self.delete_chunk, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_delete_chunks: gapic_v1.method.wrap_method( + self.batch_delete_chunks, + default_timeout=None, + 
client_info=client_info, + ), + self.list_chunks: gapic_v1.method.wrap_method( + self.list_chunks, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_corpus( + self, + ) -> Callable[ + [retriever_service.CreateCorpusRequest], + Union[retriever.Corpus, Awaitable[retriever.Corpus]], + ]: + raise NotImplementedError() + + @property + def get_corpus( + self, + ) -> Callable[ + [retriever_service.GetCorpusRequest], + Union[retriever.Corpus, Awaitable[retriever.Corpus]], + ]: + raise NotImplementedError() + + @property + def update_corpus( + self, + ) -> Callable[ + [retriever_service.UpdateCorpusRequest], + Union[retriever.Corpus, Awaitable[retriever.Corpus]], + ]: + raise NotImplementedError() + + @property + def delete_corpus( + self, + ) -> Callable[ + [retriever_service.DeleteCorpusRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], + Union[ + retriever_service.ListCorporaResponse, + Awaitable[retriever_service.ListCorporaResponse], + ], + ]: + raise NotImplementedError() + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], + Union[ + retriever_service.QueryCorpusResponse, + Awaitable[retriever_service.QueryCorpusResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_document( + self, + ) -> Callable[ + [retriever_service.CreateDocumentRequest], + Union[retriever.Document, Awaitable[retriever.Document]], + ]: + raise NotImplementedError() + + @property + def get_document( + self, + ) -> Callable[ + [retriever_service.GetDocumentRequest], + Union[retriever.Document, 
Awaitable[retriever.Document]], + ]: + raise NotImplementedError() + + @property + def update_document( + self, + ) -> Callable[ + [retriever_service.UpdateDocumentRequest], + Union[retriever.Document, Awaitable[retriever.Document]], + ]: + raise NotImplementedError() + + @property + def delete_document( + self, + ) -> Callable[ + [retriever_service.DeleteDocumentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + Union[ + retriever_service.ListDocumentsResponse, + Awaitable[retriever_service.ListDocumentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + Union[ + retriever_service.QueryDocumentResponse, + Awaitable[retriever_service.QueryDocumentResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_chunk( + self, + ) -> Callable[ + [retriever_service.CreateChunkRequest], + Union[retriever.Chunk, Awaitable[retriever.Chunk]], + ]: + raise NotImplementedError() + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + Union[ + retriever_service.BatchCreateChunksResponse, + Awaitable[retriever_service.BatchCreateChunksResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_chunk( + self, + ) -> Callable[ + [retriever_service.GetChunkRequest], + Union[retriever.Chunk, Awaitable[retriever.Chunk]], + ]: + raise NotImplementedError() + + @property + def update_chunk( + self, + ) -> Callable[ + [retriever_service.UpdateChunkRequest], + Union[retriever.Chunk, Awaitable[retriever.Chunk]], + ]: + raise NotImplementedError() + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + Union[ + retriever_service.BatchUpdateChunksResponse, + 
Awaitable[retriever_service.BatchUpdateChunksResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_chunk( + self, + ) -> Callable[ + [retriever_service.DeleteChunkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def batch_delete_chunks( + self, + ) -> Callable[ + [retriever_service.BatchDeleteChunksRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], + Union[ + retriever_service.ListChunksResponse, + Awaitable[retriever_service.ListChunksResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("RetrieverServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/grpc.py new file mode 100644 index 000000000000..456aab78ac6b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/grpc.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport + + +class RetrieverServiceGrpcTransport(RetrieverServiceTransport): + """gRPC backend transport for RetrieverService. + + An API for semantic search over a corpus of user uploaded + content. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_corpus( + self, + ) -> Callable[[retriever_service.CreateCorpusRequest], retriever.Corpus]: + r"""Return a callable for the create corpus method over gRPC. + + Creates an empty ``Corpus``. + + Returns: + Callable[[~.CreateCorpusRequest], + ~.Corpus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_corpus" not in self._stubs: + self._stubs["create_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/CreateCorpus", + request_serializer=retriever_service.CreateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["create_corpus"] + + @property + def get_corpus( + self, + ) -> Callable[[retriever_service.GetCorpusRequest], retriever.Corpus]: + r"""Return a callable for the get corpus method over gRPC. + + Gets information about a specific ``Corpus``. + + Returns: + Callable[[~.GetCorpusRequest], + ~.Corpus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_corpus" not in self._stubs: + self._stubs["get_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/GetCorpus", + request_serializer=retriever_service.GetCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["get_corpus"] + + @property + def update_corpus( + self, + ) -> Callable[[retriever_service.UpdateCorpusRequest], retriever.Corpus]: + r"""Return a callable for the update corpus method over gRPC. + + Updates a ``Corpus``. + + Returns: + Callable[[~.UpdateCorpusRequest], + ~.Corpus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_corpus" not in self._stubs: + self._stubs["update_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/UpdateCorpus", + request_serializer=retriever_service.UpdateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["update_corpus"] + + @property + def delete_corpus( + self, + ) -> Callable[[retriever_service.DeleteCorpusRequest], empty_pb2.Empty]: + r"""Return a callable for the delete corpus method over gRPC. + + Deletes a ``Corpus``. + + Returns: + Callable[[~.DeleteCorpusRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_corpus" not in self._stubs: + self._stubs["delete_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/DeleteCorpus", + request_serializer=retriever_service.DeleteCorpusRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_corpus"] + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], retriever_service.ListCorporaResponse + ]: + r"""Return a callable for the list corpora method over gRPC. + + Lists all ``Corpora`` owned by the user. + + Returns: + Callable[[~.ListCorporaRequest], + ~.ListCorporaResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_corpora" not in self._stubs: + self._stubs["list_corpora"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/ListCorpora", + request_serializer=retriever_service.ListCorporaRequest.serialize, + response_deserializer=retriever_service.ListCorporaResponse.deserialize, + ) + return self._stubs["list_corpora"] + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], retriever_service.QueryCorpusResponse + ]: + r"""Return a callable for the query corpus method over gRPC. + + Performs semantic search over a ``Corpus``. + + Returns: + Callable[[~.QueryCorpusRequest], + ~.QueryCorpusResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_corpus" not in self._stubs: + self._stubs["query_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/QueryCorpus", + request_serializer=retriever_service.QueryCorpusRequest.serialize, + response_deserializer=retriever_service.QueryCorpusResponse.deserialize, + ) + return self._stubs["query_corpus"] + + @property + def create_document( + self, + ) -> Callable[[retriever_service.CreateDocumentRequest], retriever.Document]: + r"""Return a callable for the create document method over gRPC. + + Creates an empty ``Document``. + + Returns: + Callable[[~.CreateDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_document" not in self._stubs: + self._stubs["create_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/CreateDocument", + request_serializer=retriever_service.CreateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["create_document"] + + @property + def get_document( + self, + ) -> Callable[[retriever_service.GetDocumentRequest], retriever.Document]: + r"""Return a callable for the get document method over gRPC. + + Gets information about a specific ``Document``. + + Returns: + Callable[[~.GetDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_document" not in self._stubs: + self._stubs["get_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/GetDocument", + request_serializer=retriever_service.GetDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["get_document"] + + @property + def update_document( + self, + ) -> Callable[[retriever_service.UpdateDocumentRequest], retriever.Document]: + r"""Return a callable for the update document method over gRPC. + + Updates a ``Document``. + + Returns: + Callable[[~.UpdateDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_document" not in self._stubs: + self._stubs["update_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/UpdateDocument", + request_serializer=retriever_service.UpdateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["update_document"] + + @property + def delete_document( + self, + ) -> Callable[[retriever_service.DeleteDocumentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a ``Document``. + + Returns: + Callable[[~.DeleteDocumentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_document" not in self._stubs: + self._stubs["delete_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/DeleteDocument", + request_serializer=retriever_service.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_document"] + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + retriever_service.ListDocumentsResponse, + ]: + r"""Return a callable for the list documents method over gRPC. + + Lists all ``Document``\ s in a ``Corpus``. + + Returns: + Callable[[~.ListDocumentsRequest], + ~.ListDocumentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/ListDocuments", + request_serializer=retriever_service.ListDocumentsRequest.serialize, + response_deserializer=retriever_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + retriever_service.QueryDocumentResponse, + ]: + r"""Return a callable for the query document method over gRPC. + + Performs semantic search over a ``Document``. + + Returns: + Callable[[~.QueryDocumentRequest], + ~.QueryDocumentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_document" not in self._stubs: + self._stubs["query_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/QueryDocument", + request_serializer=retriever_service.QueryDocumentRequest.serialize, + response_deserializer=retriever_service.QueryDocumentResponse.deserialize, + ) + return self._stubs["query_document"] + + @property + def create_chunk( + self, + ) -> Callable[[retriever_service.CreateChunkRequest], retriever.Chunk]: + r"""Return a callable for the create chunk method over gRPC. + + Creates a ``Chunk``. + + Returns: + Callable[[~.CreateChunkRequest], + ~.Chunk]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_chunk" not in self._stubs: + self._stubs["create_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/CreateChunk", + request_serializer=retriever_service.CreateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["create_chunk"] + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + retriever_service.BatchCreateChunksResponse, + ]: + r"""Return a callable for the batch create chunks method over gRPC. + + Batch create ``Chunk``\ s. + + Returns: + Callable[[~.BatchCreateChunksRequest], + ~.BatchCreateChunksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_chunks" not in self._stubs: + self._stubs["batch_create_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/BatchCreateChunks", + request_serializer=retriever_service.BatchCreateChunksRequest.serialize, + response_deserializer=retriever_service.BatchCreateChunksResponse.deserialize, + ) + return self._stubs["batch_create_chunks"] + + @property + def get_chunk( + self, + ) -> Callable[[retriever_service.GetChunkRequest], retriever.Chunk]: + r"""Return a callable for the get chunk method over gRPC. + + Gets information about a specific ``Chunk``. + + Returns: + Callable[[~.GetChunkRequest], + ~.Chunk]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_chunk" not in self._stubs: + self._stubs["get_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/GetChunk", + request_serializer=retriever_service.GetChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["get_chunk"] + + @property + def update_chunk( + self, + ) -> Callable[[retriever_service.UpdateChunkRequest], retriever.Chunk]: + r"""Return a callable for the update chunk method over gRPC. + + Updates a ``Chunk``. + + Returns: + Callable[[~.UpdateChunkRequest], + ~.Chunk]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_chunk" not in self._stubs: + self._stubs["update_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/UpdateChunk", + request_serializer=retriever_service.UpdateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["update_chunk"] + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + retriever_service.BatchUpdateChunksResponse, + ]: + r"""Return a callable for the batch update chunks method over gRPC. + + Batch update ``Chunk``\ s. + + Returns: + Callable[[~.BatchUpdateChunksRequest], + ~.BatchUpdateChunksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_update_chunks" not in self._stubs: + self._stubs["batch_update_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/BatchUpdateChunks", + request_serializer=retriever_service.BatchUpdateChunksRequest.serialize, + response_deserializer=retriever_service.BatchUpdateChunksResponse.deserialize, + ) + return self._stubs["batch_update_chunks"] + + @property + def delete_chunk( + self, + ) -> Callable[[retriever_service.DeleteChunkRequest], empty_pb2.Empty]: + r"""Return a callable for the delete chunk method over gRPC. + + Deletes a ``Chunk``. + + Returns: + Callable[[~.DeleteChunkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_chunk" not in self._stubs: + self._stubs["delete_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/DeleteChunk", + request_serializer=retriever_service.DeleteChunkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_chunk"] + + @property + def batch_delete_chunks( + self, + ) -> Callable[[retriever_service.BatchDeleteChunksRequest], empty_pb2.Empty]: + r"""Return a callable for the batch delete chunks method over gRPC. + + Batch delete ``Chunk``\ s. + + Returns: + Callable[[~.BatchDeleteChunksRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_delete_chunks" not in self._stubs: + self._stubs["batch_delete_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/BatchDeleteChunks", + request_serializer=retriever_service.BatchDeleteChunksRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_chunks"] + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], retriever_service.ListChunksResponse + ]: + r"""Return a callable for the list chunks method over gRPC. + + Lists all ``Chunk``\ s in a ``Document``. + + Returns: + Callable[[~.ListChunksRequest], + ~.ListChunksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_chunks" not in self._stubs: + self._stubs["list_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/ListChunks", + request_serializer=retriever_service.ListChunksRequest.serialize, + response_deserializer=retriever_service.ListChunksResponse.deserialize, + ) + return self._stubs["list_chunks"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("RetrieverServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f5ddf0b50dc0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/grpc_asyncio.py @@ -0,0 +1,792 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport +from .grpc import RetrieverServiceGrpcTransport + + +class RetrieverServiceGrpcAsyncIOTransport(RetrieverServiceTransport): + """gRPC AsyncIO backend transport for RetrieverService. + + An API for semantic search over a corpus of user uploaded + content. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def create_corpus( + self, + ) -> Callable[[retriever_service.CreateCorpusRequest], Awaitable[retriever.Corpus]]: + r"""Return a callable for the create corpus method over gRPC. + + Creates an empty ``Corpus``. + + Returns: + Callable[[~.CreateCorpusRequest], + Awaitable[~.Corpus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_corpus" not in self._stubs: + self._stubs["create_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/CreateCorpus", + request_serializer=retriever_service.CreateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["create_corpus"] + + @property + def get_corpus( + self, + ) -> Callable[[retriever_service.GetCorpusRequest], Awaitable[retriever.Corpus]]: + r"""Return a callable for the get corpus method over gRPC. + + Gets information about a specific ``Corpus``. + + Returns: + Callable[[~.GetCorpusRequest], + Awaitable[~.Corpus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_corpus" not in self._stubs: + self._stubs["get_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/GetCorpus", + request_serializer=retriever_service.GetCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["get_corpus"] + + @property + def update_corpus( + self, + ) -> Callable[[retriever_service.UpdateCorpusRequest], Awaitable[retriever.Corpus]]: + r"""Return a callable for the update corpus method over gRPC. + + Updates a ``Corpus``. + + Returns: + Callable[[~.UpdateCorpusRequest], + Awaitable[~.Corpus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_corpus" not in self._stubs: + self._stubs["update_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/UpdateCorpus", + request_serializer=retriever_service.UpdateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["update_corpus"] + + @property + def delete_corpus( + self, + ) -> Callable[[retriever_service.DeleteCorpusRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete corpus method over gRPC. + + Deletes a ``Corpus``. + + Returns: + Callable[[~.DeleteCorpusRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_corpus" not in self._stubs: + self._stubs["delete_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/DeleteCorpus", + request_serializer=retriever_service.DeleteCorpusRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_corpus"] + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], + Awaitable[retriever_service.ListCorporaResponse], + ]: + r"""Return a callable for the list corpora method over gRPC. + + Lists all ``Corpora`` owned by the user. + + Returns: + Callable[[~.ListCorporaRequest], + Awaitable[~.ListCorporaResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_corpora" not in self._stubs: + self._stubs["list_corpora"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/ListCorpora", + request_serializer=retriever_service.ListCorporaRequest.serialize, + response_deserializer=retriever_service.ListCorporaResponse.deserialize, + ) + return self._stubs["list_corpora"] + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], + Awaitable[retriever_service.QueryCorpusResponse], + ]: + r"""Return a callable for the query corpus method over gRPC. + + Performs semantic search over a ``Corpus``. + + Returns: + Callable[[~.QueryCorpusRequest], + Awaitable[~.QueryCorpusResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "query_corpus" not in self._stubs: + self._stubs["query_corpus"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/QueryCorpus", + request_serializer=retriever_service.QueryCorpusRequest.serialize, + response_deserializer=retriever_service.QueryCorpusResponse.deserialize, + ) + return self._stubs["query_corpus"] + + @property + def create_document( + self, + ) -> Callable[ + [retriever_service.CreateDocumentRequest], Awaitable[retriever.Document] + ]: + r"""Return a callable for the create document method over gRPC. + + Creates an empty ``Document``. + + Returns: + Callable[[~.CreateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_document" not in self._stubs: + self._stubs["create_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/CreateDocument", + request_serializer=retriever_service.CreateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["create_document"] + + @property + def get_document( + self, + ) -> Callable[ + [retriever_service.GetDocumentRequest], Awaitable[retriever.Document] + ]: + r"""Return a callable for the get document method over gRPC. + + Gets information about a specific ``Document``. + + Returns: + Callable[[~.GetDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_document" not in self._stubs: + self._stubs["get_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/GetDocument", + request_serializer=retriever_service.GetDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["get_document"] + + @property + def update_document( + self, + ) -> Callable[ + [retriever_service.UpdateDocumentRequest], Awaitable[retriever.Document] + ]: + r"""Return a callable for the update document method over gRPC. + + Updates a ``Document``. + + Returns: + Callable[[~.UpdateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_document" not in self._stubs: + self._stubs["update_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/UpdateDocument", + request_serializer=retriever_service.UpdateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["update_document"] + + @property + def delete_document( + self, + ) -> Callable[ + [retriever_service.DeleteDocumentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a ``Document``. + + Returns: + Callable[[~.DeleteDocumentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_document" not in self._stubs: + self._stubs["delete_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/DeleteDocument", + request_serializer=retriever_service.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_document"] + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + Awaitable[retriever_service.ListDocumentsResponse], + ]: + r"""Return a callable for the list documents method over gRPC. + + Lists all ``Document``\ s in a ``Corpus``. + + Returns: + Callable[[~.ListDocumentsRequest], + Awaitable[~.ListDocumentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/ListDocuments", + request_serializer=retriever_service.ListDocumentsRequest.serialize, + response_deserializer=retriever_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + Awaitable[retriever_service.QueryDocumentResponse], + ]: + r"""Return a callable for the query document method over gRPC. + + Performs semantic search over a ``Document``. + + Returns: + Callable[[~.QueryDocumentRequest], + Awaitable[~.QueryDocumentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_document" not in self._stubs: + self._stubs["query_document"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/QueryDocument", + request_serializer=retriever_service.QueryDocumentRequest.serialize, + response_deserializer=retriever_service.QueryDocumentResponse.deserialize, + ) + return self._stubs["query_document"] + + @property + def create_chunk( + self, + ) -> Callable[[retriever_service.CreateChunkRequest], Awaitable[retriever.Chunk]]: + r"""Return a callable for the create chunk method over gRPC. + + Creates a ``Chunk``. + + Returns: + Callable[[~.CreateChunkRequest], + Awaitable[~.Chunk]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_chunk" not in self._stubs: + self._stubs["create_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/CreateChunk", + request_serializer=retriever_service.CreateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["create_chunk"] + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + Awaitable[retriever_service.BatchCreateChunksResponse], + ]: + r"""Return a callable for the batch create chunks method over gRPC. + + Batch create ``Chunk``\ s. + + Returns: + Callable[[~.BatchCreateChunksRequest], + Awaitable[~.BatchCreateChunksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_chunks" not in self._stubs: + self._stubs["batch_create_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/BatchCreateChunks", + request_serializer=retriever_service.BatchCreateChunksRequest.serialize, + response_deserializer=retriever_service.BatchCreateChunksResponse.deserialize, + ) + return self._stubs["batch_create_chunks"] + + @property + def get_chunk( + self, + ) -> Callable[[retriever_service.GetChunkRequest], Awaitable[retriever.Chunk]]: + r"""Return a callable for the get chunk method over gRPC. + + Gets information about a specific ``Chunk``. + + Returns: + Callable[[~.GetChunkRequest], + Awaitable[~.Chunk]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_chunk" not in self._stubs: + self._stubs["get_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/GetChunk", + request_serializer=retriever_service.GetChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["get_chunk"] + + @property + def update_chunk( + self, + ) -> Callable[[retriever_service.UpdateChunkRequest], Awaitable[retriever.Chunk]]: + r"""Return a callable for the update chunk method over gRPC. + + Updates a ``Chunk``. + + Returns: + Callable[[~.UpdateChunkRequest], + Awaitable[~.Chunk]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_chunk" not in self._stubs: + self._stubs["update_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/UpdateChunk", + request_serializer=retriever_service.UpdateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["update_chunk"] + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + Awaitable[retriever_service.BatchUpdateChunksResponse], + ]: + r"""Return a callable for the batch update chunks method over gRPC. + + Batch update ``Chunk``\ s. + + Returns: + Callable[[~.BatchUpdateChunksRequest], + Awaitable[~.BatchUpdateChunksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_update_chunks" not in self._stubs: + self._stubs["batch_update_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/BatchUpdateChunks", + request_serializer=retriever_service.BatchUpdateChunksRequest.serialize, + response_deserializer=retriever_service.BatchUpdateChunksResponse.deserialize, + ) + return self._stubs["batch_update_chunks"] + + @property + def delete_chunk( + self, + ) -> Callable[[retriever_service.DeleteChunkRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete chunk method over gRPC. + + Deletes a ``Chunk``. + + Returns: + Callable[[~.DeleteChunkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_chunk" not in self._stubs: + self._stubs["delete_chunk"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/DeleteChunk", + request_serializer=retriever_service.DeleteChunkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_chunk"] + + @property + def batch_delete_chunks( + self, + ) -> Callable[ + [retriever_service.BatchDeleteChunksRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the batch delete chunks method over gRPC. + + Batch delete ``Chunk``\ s. + + Returns: + Callable[[~.BatchDeleteChunksRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_delete_chunks" not in self._stubs: + self._stubs["batch_delete_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/BatchDeleteChunks", + request_serializer=retriever_service.BatchDeleteChunksRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_chunks"] + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], + Awaitable[retriever_service.ListChunksResponse], + ]: + r"""Return a callable for the list chunks method over gRPC. + + Lists all ``Chunk``\ s in a ``Document``. + + Returns: + Callable[[~.ListChunksRequest], + Awaitable[~.ListChunksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_chunks" not in self._stubs: + self._stubs["list_chunks"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.RetrieverService/ListChunks", + request_serializer=retriever_service.ListChunksRequest.serialize, + response_deserializer=retriever_service.ListChunksResponse.deserialize, + ) + return self._stubs["list_chunks"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("RetrieverServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py new file mode 100644 index 000000000000..a1e2399d50c3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py @@ -0,0 +1,2717 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import RetrieverServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RetrieverServiceRestInterceptor: + """Interceptor for RetrieverService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RetrieverServiceRestTransport. + + .. 
code-block:: python + class MyCustomRetrieverServiceInterceptor(RetrieverServiceRestInterceptor): + def pre_batch_create_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_chunks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_delete_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_batch_update_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_chunks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_chunk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_chunk(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_get_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_chunks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_corpora(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_corpora(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_chunk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_update_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_document(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RetrieverServiceRestTransport(interceptor=MyCustomRetrieverServiceInterceptor()) + client = RetrieverServiceClient(transport=transport) + + + """ + + def pre_batch_create_chunks( + self, + request: retriever_service.BatchCreateChunksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.BatchCreateChunksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_batch_create_chunks( + self, response: retriever_service.BatchCreateChunksResponse + ) -> retriever_service.BatchCreateChunksResponse: + """Post-rpc interceptor for batch_create_chunks + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_batch_delete_chunks( + self, + request: retriever_service.BatchDeleteChunksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.BatchDeleteChunksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_delete_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. 
+ """ + return request, metadata + + def pre_batch_update_chunks( + self, + request: retriever_service.BatchUpdateChunksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.BatchUpdateChunksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_update_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_batch_update_chunks( + self, response: retriever_service.BatchUpdateChunksResponse + ) -> retriever_service.BatchUpdateChunksResponse: + """Post-rpc interceptor for batch_update_chunks + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_create_chunk( + self, + request: retriever_service.CreateChunkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.CreateChunkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_create_chunk(self, response: retriever.Chunk) -> retriever.Chunk: + """Post-rpc interceptor for create_chunk + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_create_corpus( + self, + request: retriever_service.CreateCorpusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.CreateCorpusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. 
+ """ + return request, metadata + + def post_create_corpus(self, response: retriever.Corpus) -> retriever.Corpus: + """Post-rpc interceptor for create_corpus + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_create_document( + self, + request: retriever_service.CreateDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.CreateDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_create_document(self, response: retriever.Document) -> retriever.Document: + """Post-rpc interceptor for create_document + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_delete_chunk( + self, + request: retriever_service.DeleteChunkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.DeleteChunkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def pre_delete_corpus( + self, + request: retriever_service.DeleteCorpusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.DeleteCorpusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. 
+ """ + return request, metadata + + def pre_delete_document( + self, + request: retriever_service.DeleteDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.DeleteDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def pre_get_chunk( + self, + request: retriever_service.GetChunkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.GetChunkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_chunk(self, response: retriever.Chunk) -> retriever.Chunk: + """Post-rpc interceptor for get_chunk + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_get_corpus( + self, + request: retriever_service.GetCorpusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.GetCorpusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_corpus(self, response: retriever.Corpus) -> retriever.Corpus: + """Post-rpc interceptor for get_corpus + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_document( + self, + request: retriever_service.GetDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.GetDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_document(self, response: retriever.Document) -> retriever.Document: + """Post-rpc interceptor for get_document + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_list_chunks( + self, + request: retriever_service.ListChunksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.ListChunksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_list_chunks( + self, response: retriever_service.ListChunksResponse + ) -> retriever_service.ListChunksResponse: + """Post-rpc interceptor for list_chunks + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_list_corpora( + self, + request: retriever_service.ListCorporaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.ListCorporaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_corpora + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. 
+ """ + return request, metadata + + def post_list_corpora( + self, response: retriever_service.ListCorporaResponse + ) -> retriever_service.ListCorporaResponse: + """Post-rpc interceptor for list_corpora + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_list_documents( + self, + request: retriever_service.ListDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.ListDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_list_documents( + self, response: retriever_service.ListDocumentsResponse + ) -> retriever_service.ListDocumentsResponse: + """Post-rpc interceptor for list_documents + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_query_corpus( + self, + request: retriever_service.QueryCorpusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.QueryCorpusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_query_corpus( + self, response: retriever_service.QueryCorpusResponse + ) -> retriever_service.QueryCorpusResponse: + """Post-rpc interceptor for query_corpus + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. 
+ """ + return response + + def pre_query_document( + self, + request: retriever_service.QueryDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.QueryDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_query_document( + self, response: retriever_service.QueryDocumentResponse + ) -> retriever_service.QueryDocumentResponse: + """Post-rpc interceptor for query_document + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_update_chunk( + self, + request: retriever_service.UpdateChunkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.UpdateChunkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_update_chunk(self, response: retriever.Chunk) -> retriever.Chunk: + """Post-rpc interceptor for update_chunk + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_update_corpus( + self, + request: retriever_service.UpdateCorpusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.UpdateCorpusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. 
+ """ + return request, metadata + + def post_update_corpus(self, response: retriever.Corpus) -> retriever.Corpus: + """Post-rpc interceptor for update_corpus + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_update_document( + self, + request: retriever_service.UpdateDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[retriever_service.UpdateDocumentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_update_document(self, response: retriever.Document) -> retriever.Document: + """Post-rpc interceptor for update_document + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RetrieverServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RetrieverServiceRestInterceptor + + +class RetrieverServiceRestTransport(RetrieverServiceTransport): + """REST backend transport for RetrieverService. + + An API for semantic search over a corpus of user uploaded + content. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RetrieverServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or RetrieverServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _BatchCreateChunks(RetrieverServiceRestStub):
+        def __hash__(self):
+            return hash("BatchCreateChunks")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: retriever_service.BatchCreateChunksRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> retriever_service.BatchCreateChunksResponse:
+            r"""Call the batch create chunks method over HTTP.
+ + Args: + request (~.retriever_service.BatchCreateChunksRequest): + The request object. Request to batch create ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.BatchCreateChunksResponse: + Response from ``BatchCreateChunks`` containing a list of + created ``Chunk``\ s. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=corpora/*/documents/*}/chunks:batchCreate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_create_chunks( + request, metadata + ) + pb_request = retriever_service.BatchCreateChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.BatchCreateChunksResponse() + pb_resp = retriever_service.BatchCreateChunksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_chunks(resp) + return resp + + class _BatchDeleteChunks(RetrieverServiceRestStub): + def __hash__(self): + return hash("BatchDeleteChunks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.BatchDeleteChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the batch delete chunks method over HTTP. + + Args: + request (~.retriever_service.BatchDeleteChunksRequest): + The request object. Request to batch delete ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=corpora/*/documents/*}/chunks:batchDelete", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_delete_chunks( + request, metadata + ) + pb_request = retriever_service.BatchDeleteChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _BatchUpdateChunks(RetrieverServiceRestStub): + def __hash__(self): + return hash("BatchUpdateChunks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.BatchUpdateChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.BatchUpdateChunksResponse: + r"""Call the batch update chunks method over HTTP. + + Args: + request (~.retriever_service.BatchUpdateChunksRequest): + The request object. Request to batch update ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.BatchUpdateChunksResponse: + Response from ``BatchUpdateChunks`` containing a list of + updated ``Chunk``\ s. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=corpora/*/documents/*}/chunks:batchUpdate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_update_chunks( + request, metadata + ) + pb_request = retriever_service.BatchUpdateChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.BatchUpdateChunksResponse() + pb_resp = retriever_service.BatchUpdateChunksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_update_chunks(resp) + return resp + + class _CreateChunk(RetrieverServiceRestStub): + def __hash__(self): + return hash("CreateChunk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.CreateChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Call the create chunk method over HTTP. + + Args: + request (~.retriever_service.CreateChunkRequest): + The request object. Request to create a ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Chunk: + A ``Chunk`` is a subpart of a ``Document`` that is + treated as an independent unit for the purposes of + vector representation and storage. A ``Corpus`` can have + a maximum of 1 million ``Chunk``\ s. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=corpora/*/documents/*}/chunks", + "body": "chunk", + }, + ] + request, metadata = self._interceptor.pre_create_chunk(request, metadata) + pb_request = retriever_service.CreateChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Chunk() + pb_resp = retriever.Chunk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_chunk(resp) + return resp + + class _CreateCorpus(RetrieverServiceRestStub): + def __hash__(self): + return hash("CreateCorpus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.CreateCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Call the create corpus method over HTTP. + + Args: + request (~.retriever_service.CreateCorpusRequest): + The request object. Request to create a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Corpus: + A ``Corpus`` is a collection of ``Document``\ s. A + project can create up to 5 corpora. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/corpora", + "body": "corpus", + }, + ] + request, metadata = self._interceptor.pre_create_corpus(request, metadata) + pb_request = retriever_service.CreateCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Corpus() + pb_resp = retriever.Corpus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_corpus(resp) + return resp + + class _CreateDocument(RetrieverServiceRestStub): + def __hash__(self): + return hash("CreateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.CreateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Call the create document method over HTTP. + + Args: + request (~.retriever_service.CreateDocumentRequest): + The request object. Request to create a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Document: + A ``Document`` is a collection of ``Chunk``\ s. A + ``Corpus`` can have a maximum of 10,000 ``Document``\ s. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=corpora/*}/documents", + "body": "document", + }, + ] + request, metadata = self._interceptor.pre_create_document(request, metadata) + pb_request = retriever_service.CreateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Document() + pb_resp = retriever.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_document(resp) + return resp + + class _DeleteChunk(RetrieverServiceRestStub): + def __hash__(self): + return hash("DeleteChunk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.DeleteChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete chunk method over HTTP. + + Args: + request (~.retriever_service.DeleteChunkRequest): + The request object. Request to delete a ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=corpora/*/documents/*/chunks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_chunk(request, metadata) + pb_request = retriever_service.DeleteChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteCorpus(RetrieverServiceRestStub): + def __hash__(self): + return hash("DeleteCorpus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.DeleteCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete corpus method over HTTP. + + Args: + request (~.retriever_service.DeleteCorpusRequest): + The request object. Request to delete a ``Corpus``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=corpora/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_corpus(request, metadata) + pb_request = retriever_service.DeleteCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDocument(RetrieverServiceRestStub): + def __hash__(self): + return hash("DeleteDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.DeleteDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete document method over HTTP. + + Args: + request (~.retriever_service.DeleteDocumentRequest): + The request object. Request to delete a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=corpora/*/documents/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_document(request, metadata) + pb_request = retriever_service.DeleteDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetChunk(RetrieverServiceRestStub): + def __hash__(self): + return hash("GetChunk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.GetChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Call the get chunk method over HTTP. + + Args: + request (~.retriever_service.GetChunkRequest): + The request object. Request for getting information about a specific + ``Chunk``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Chunk: + A ``Chunk`` is a subpart of a ``Document`` that is + treated as an independent unit for the purposes of + vector representation and storage. A ``Corpus`` can have + a maximum of 1 million ``Chunk``\ s. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=corpora/*/documents/*/chunks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_chunk(request, metadata) + pb_request = retriever_service.GetChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Chunk() + pb_resp = retriever.Chunk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_chunk(resp) + return resp + + class _GetCorpus(RetrieverServiceRestStub): + def __hash__(self): + return hash("GetCorpus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.GetCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Call the get corpus method over HTTP. + + Args: + request (~.retriever_service.GetCorpusRequest): + The request object. Request for getting information about a specific + ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Corpus: + A ``Corpus`` is a collection of ``Document``\ s. A + project can create up to 5 corpora. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=corpora/*}", + }, + ] + request, metadata = self._interceptor.pre_get_corpus(request, metadata) + pb_request = retriever_service.GetCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Corpus() + pb_resp = retriever.Corpus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_corpus(resp) + return resp + + class _GetDocument(RetrieverServiceRestStub): + def __hash__(self): + return hash("GetDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.GetDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Call the get document method over HTTP. + + Args: + request (~.retriever_service.GetDocumentRequest): + The request object. Request for getting information about a specific + ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Document: + A ``Document`` is a collection of ``Chunk``\ s. A + ``Corpus`` can have a maximum of 10,000 ``Document``\ s. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=corpora/*/documents/*}", + }, + ] + request, metadata = self._interceptor.pre_get_document(request, metadata) + pb_request = retriever_service.GetDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Document() + pb_resp = retriever.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_document(resp) + return resp + + class _ListChunks(RetrieverServiceRestStub): + def __hash__(self): + return hash("ListChunks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.ListChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.ListChunksResponse: + r"""Call the list chunks method over HTTP. + + Args: + request (~.retriever_service.ListChunksRequest): + The request object. Request for listing ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.ListChunksResponse: + Response from ``ListChunks`` containing a paginated list + of ``Chunk``\ s. The ``Chunk``\ s are sorted by + ascending ``chunk.create_time``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=corpora/*/documents/*}/chunks", + }, + ] + request, metadata = self._interceptor.pre_list_chunks(request, metadata) + pb_request = retriever_service.ListChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.ListChunksResponse() + pb_resp = retriever_service.ListChunksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_chunks(resp) + return resp + + class _ListCorpora(RetrieverServiceRestStub): + def __hash__(self): + return hash("ListCorpora") + + def __call__( + self, + request: retriever_service.ListCorporaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.ListCorporaResponse: + r"""Call the list corpora method over HTTP. 
+ + Args: + request (~.retriever_service.ListCorporaRequest): + The request object. Request for listing ``Corpora``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.ListCorporaResponse: + Response from ``ListCorpora`` containing a paginated + list of ``Corpora``. The results are sorted by ascending + ``corpus.create_time``. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/corpora", + }, + ] + request, metadata = self._interceptor.pre_list_corpora(request, metadata) + pb_request = retriever_service.ListCorporaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.ListCorporaResponse() + pb_resp = retriever_service.ListCorporaResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_corpora(resp) + return resp + + class _ListDocuments(RetrieverServiceRestStub): + def __hash__(self): + return hash("ListDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.ListDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.ListDocumentsResponse: + r"""Call the list documents method over HTTP. + + Args: + request (~.retriever_service.ListDocumentsRequest): + The request object. Request for listing ``Document``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.ListDocumentsResponse: + Response from ``ListDocuments`` containing a paginated + list of ``Document``\ s. The ``Document``\ s are sorted + by ascending ``document.create_time``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=corpora/*}/documents", + }, + ] + request, metadata = self._interceptor.pre_list_documents(request, metadata) + pb_request = retriever_service.ListDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.ListDocumentsResponse() + pb_resp = retriever_service.ListDocumentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_documents(resp) + return resp + + class _QueryCorpus(RetrieverServiceRestStub): + def __hash__(self): + return hash("QueryCorpus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.QueryCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.QueryCorpusResponse: + r"""Call the query corpus method over HTTP. + + Args: + request (~.retriever_service.QueryCorpusRequest): + The request object. Request for querying a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.QueryCorpusResponse: + Response from ``QueryCorpus`` containing a list of + relevant chunks. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=corpora/*}:query", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_query_corpus(request, metadata) + pb_request = retriever_service.QueryCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.QueryCorpusResponse() + pb_resp = retriever_service.QueryCorpusResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_corpus(resp) + return resp + + class _QueryDocument(RetrieverServiceRestStub): + def __hash__(self): + return hash("QueryDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.QueryDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever_service.QueryDocumentResponse: + r"""Call the query document method over HTTP. + + Args: + request (~.retriever_service.QueryDocumentRequest): + The request object. Request for querying a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever_service.QueryDocumentResponse: + Response from ``QueryDocument`` containing a list of + relevant chunks. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=corpora/*/documents/*}:query", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_query_document(request, metadata) + pb_request = retriever_service.QueryDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.QueryDocumentResponse() + pb_resp = retriever_service.QueryDocumentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_document(resp) + return resp + + class _UpdateChunk(RetrieverServiceRestStub): + def __hash__(self): + return hash("UpdateChunk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.UpdateChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Chunk: + r"""Call the update chunk method over HTTP. + + Args: + request (~.retriever_service.UpdateChunkRequest): + The request object. Request to update a ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Chunk: + A ``Chunk`` is a subpart of a ``Document`` that is + treated as an independent unit for the purposes of + vector representation and storage. A ``Corpus`` can have + a maximum of 1 million ``Chunk``\ s. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{chunk.name=corpora/*/documents/*/chunks/*}", + "body": "chunk", + }, + ] + request, metadata = self._interceptor.pre_update_chunk(request, metadata) + pb_request = retriever_service.UpdateChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Chunk() + pb_resp = retriever.Chunk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_chunk(resp) + return resp + + class _UpdateCorpus(RetrieverServiceRestStub): + def __hash__(self): + return hash("UpdateCorpus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.UpdateCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Corpus: + r"""Call the update corpus method over HTTP. + + Args: + request (~.retriever_service.UpdateCorpusRequest): + The request object. Request to update a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Corpus: + A ``Corpus`` is a collection of ``Document``\ s. A + project can create up to 5 corpora. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{corpus.name=corpora/*}", + "body": "corpus", + }, + ] + request, metadata = self._interceptor.pre_update_corpus(request, metadata) + pb_request = retriever_service.UpdateCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Corpus() + pb_resp = retriever.Corpus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_corpus(resp) + return resp + + class _UpdateDocument(RetrieverServiceRestStub): + def __hash__(self): + return hash("UpdateDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: retriever_service.UpdateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> retriever.Document: + r"""Call the update document method over HTTP. + + Args: + request (~.retriever_service.UpdateDocumentRequest): + The request object. Request to update a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.retriever.Document: + A ``Document`` is a collection of ``Chunk``\ s. A + ``Corpus`` can have a maximum of 10,000 ``Document``\ s. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{document.name=corpora/*/documents/*}", + "body": "document", + }, + ] + request, metadata = self._interceptor.pre_update_document(request, metadata) + pb_request = retriever_service.UpdateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Document() + pb_resp = retriever.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_document(resp) + return resp + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + retriever_service.BatchCreateChunksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchCreateChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_delete_chunks( + self, + ) -> Callable[[retriever_service.BatchDeleteChunksRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchDeleteChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + retriever_service.BatchUpdateChunksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_chunk( + self, + ) -> Callable[[retriever_service.CreateChunkRequest], retriever.Chunk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_corpus( + self, + ) -> Callable[[retriever_service.CreateCorpusRequest], retriever.Corpus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_document( + self, + ) -> Callable[[retriever_service.CreateDocumentRequest], retriever.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_chunk( + self, + ) -> Callable[[retriever_service.DeleteChunkRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_corpus( + self, + ) -> Callable[[retriever_service.DeleteCorpusRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document( + self, + ) -> Callable[[retriever_service.DeleteDocumentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_chunk( + self, + ) -> Callable[[retriever_service.GetChunkRequest], retriever.Chunk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_corpus( + self, + ) -> Callable[[retriever_service.GetCorpusRequest], retriever.Corpus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_document( + self, + ) -> Callable[[retriever_service.GetDocumentRequest], retriever.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], retriever_service.ListChunksResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], retriever_service.ListCorporaResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCorpora(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + retriever_service.ListDocumentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], retriever_service.QueryCorpusResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._QueryCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + retriever_service.QueryDocumentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_chunk( + self, + ) -> Callable[[retriever_service.UpdateChunkRequest], retriever.Chunk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_corpus( + self, + ) -> Callable[[retriever_service.UpdateCorpusRequest], retriever.Corpus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_document( + self, + ) -> Callable[[retriever_service.UpdateDocumentRequest], retriever.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RetrieverServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/__init__.py new file mode 100644 index 000000000000..f705e582e7a1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import TextServiceAsyncClient +from .client import TextServiceClient + +__all__ = ( + "TextServiceClient", + "TextServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py new file mode 100644 index 000000000000..ecc68e1f50ab --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/async_client.py @@ -0,0 +1,835 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import safety, text_service + +from .client import TextServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .transports.grpc_asyncio import TextServiceGrpcAsyncIOTransport + + +class TextServiceAsyncClient: + """API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. 
+ """ + + _client: TextServiceClient + + DEFAULT_ENDPOINT = TextServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextServiceClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(TextServiceClient.model_path) + parse_model_path = staticmethod(TextServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + TextServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TextServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(TextServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(TextServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + TextServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(TextServiceClient.common_project_path) + parse_common_project_path = staticmethod( + TextServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(TextServiceClient.common_location_path) + parse_common_location_path = staticmethod( + TextServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceAsyncClient: The constructed client. + """ + return TextServiceClient.from_service_account_info.__func__(TextServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceAsyncClient: The constructed client. + """ + return TextServiceClient.from_service_account_file.__func__(TextServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return TextServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TextServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TextServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TextServiceClient).get_transport_class, type(TextServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TextServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TextServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TextServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_text( + self, + request: Optional[Union[text_service.GenerateTextRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + max_output_tokens: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.GenerateTextResponse: + r"""Generates a response from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.GenerateTextRequest, dict]]): + The request object. Request to generate a text completion + response from the model. + model (:class:`str`): + Required. The name of the ``Model`` or ``TunedModel`` to + use for generating the completion. Examples: + models/text-bison-001 + tunedModels/sentence-translator-u3b7m + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta.types.TextPrompt`): + Required. The free-form input text + given to the model as a prompt. + Given a prompt, the model will generate + a TextCompletion response it predicts as + the completion of the input text. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (:class:`float`): + Optional. Controls the randomness of the output. Note: + The default value varies by model, see the + ``Model.temperature`` attribute of the ``Model`` + returned the ``getModel`` function. + + Values can range from [0.0,1.0], inclusive. 
A value + closer to 1.0 will produce responses that are more + varied and creative, while a value closer to 0.0 will + typically result in more straightforward responses from + the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (:class:`int`): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, + this will default to 1. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_output_tokens (:class:`int`): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit + specified in the ``Model`` specification. + + This corresponds to the ``max_output_tokens`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (:class:`float`): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities + so that only the most likely tokens are considered. + Top-k sampling directly limits the maximum number of + tokens to consider, while Nucleus sampling limits number + of tokens based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (:class:`int`): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. Defaults to 40. 
+ + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + model, + prompt, + temperature, + candidate_count, + max_output_tokens, + top_p, + top_k, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = text_service.GenerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if max_output_tokens is not None: + request.max_output_tokens = max_output_tokens + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_text, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def embed_text( + self, + request: Optional[Union[text_service.EmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + text: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.EmbedTextResponse: + r"""Generates an embedding from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.EmbedTextRequest, dict]]): + The request object. Request to get a text embedding from + the model. + model (:class:`str`): + Required. The model name to use with + the format model=models/{model}. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + text (:class:`str`): + Optional. The free-form input text + that the model will turn into an + embedding. + + This corresponds to the ``text`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.EmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, text]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = text_service.EmbedTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if text is not None: + request.text = text + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.embed_text, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_embed_text( + self, + request: Optional[Union[text_service.BatchEmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + texts: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.BatchEmbedTextResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.batch_embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.BatchEmbedTextRequest, dict]]): + The request object. Batch request to get a text embedding + from the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the embedding. Examples: + models/embedding-gecko-001 + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + texts (:class:`MutableSequence[str]`): + Optional. The free-form input texts + that the model will turn into an + embedding. The current limit is 100 + texts, over which an error will be + thrown. + + This corresponds to the ``texts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchEmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, texts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = text_service.BatchEmbedTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if texts: + request.texts.extend(texts) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_embed_text, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def count_text_tokens( + self, + request: Optional[Union[text_service.CountTextTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.CountTextTokensResponse: + r"""Runs a model's tokenizer on a text and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_text_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.CountTextTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1beta.types.TextPrompt`): + Required. The free-form input text + given to the model as a prompt. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.CountTextTokensResponse: + A response from CountTextTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = text_service.CountTextTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.count_text_tokens, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "TextServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py new file mode 100644 index 000000000000..bcdf9bb33a6d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py @@ -0,0 +1,1030 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import safety, text_service + +from .transports.base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .transports.grpc import TextServiceGrpcTransport +from .transports.grpc_asyncio import TextServiceGrpcAsyncIOTransport +from .transports.rest import TextServiceRestTransport + + +class TextServiceClientMeta(type): + """Metaclass for the TextService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TextServiceTransport]] + _transport_registry["grpc"] = TextServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TextServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TextServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TextServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TextServiceClient(metaclass=TextServiceClientMeta): + """API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TextServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TextServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TextServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TextServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TextServiceTransport): + # transport is a TextServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def generate_text( + self, + request: Optional[Union[text_service.GenerateTextRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + max_output_tokens: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.GenerateTextResponse: + r"""Generates a response from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.GenerateTextRequest, dict]): + The request object. Request to generate a text completion + response from the model. + model (str): + Required. The name of the ``Model`` or ``TunedModel`` to + use for generating the completion. Examples: + models/text-bison-001 + tunedModels/sentence-translator-u3b7m + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1beta.types.TextPrompt): + Required. The free-form input text + given to the model as a prompt. + Given a prompt, the model will generate + a TextCompletion response it predicts as + the completion of the input text. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (float): + Optional. Controls the randomness of the output. Note: + The default value varies by model, see the + ``Model.temperature`` attribute of the ``Model`` + returned the ``getModel`` function. + + Values can range from [0.0,1.0], inclusive. 
A value + closer to 1.0 will produce responses that are more + varied and creative, while a value closer to 0.0 will + typically result in more straightforward responses from + the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (int): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, + this will default to 1. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_output_tokens (int): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit + specified in the ``Model`` specification. + + This corresponds to the ``max_output_tokens`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (float): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities + so that only the most likely tokens are considered. + Top-k sampling directly limits the maximum number of + tokens to consider, while Nucleus sampling limits number + of tokens based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. Defaults to 40. 
+ + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + model, + prompt, + temperature, + candidate_count, + max_output_tokens, + top_p, + top_k, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a text_service.GenerateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, text_service.GenerateTextRequest): + request = text_service.GenerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if max_output_tokens is not None: + request.max_output_tokens = max_output_tokens + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def embed_text( + self, + request: Optional[Union[text_service.EmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + text: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.EmbedTextResponse: + r"""Generates an embedding from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.EmbedTextRequest, dict]): + The request object. Request to get a text embedding from + the model. + model (str): + Required. The model name to use with + the format model=models/{model}. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + text (str): + Optional. The free-form input text + that the model will turn into an + embedding. + + This corresponds to the ``text`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.EmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, text]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a text_service.EmbedTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, text_service.EmbedTextRequest): + request = text_service.EmbedTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if text is not None: + request.text = text + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.embed_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_embed_text( + self, + request: Optional[Union[text_service.BatchEmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + texts: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.BatchEmbedTextResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.batch_embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.BatchEmbedTextRequest, dict]): + The request object. Batch request to get a text embedding + from the model. + model (str): + Required. The name of the ``Model`` to use for + generating the embedding. Examples: + models/embedding-gecko-001 + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + texts (MutableSequence[str]): + Optional. The free-form input texts + that the model will turn into an + embedding. The current limit is 100 + texts, over which an error will be + thrown. + + This corresponds to the ``texts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.BatchEmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, texts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a text_service.BatchEmbedTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, text_service.BatchEmbedTextRequest): + request = text_service.BatchEmbedTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if texts is not None: + request.texts = texts + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_embed_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def count_text_tokens( + self, + request: Optional[Union[text_service.CountTextTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.CountTextTokensResponse: + r"""Runs a model's tokenizer on a text and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_text_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.CountTextTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1beta.types.TextPrompt): + Required. The free-form input text + given to the model as a prompt. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ai.generativelanguage_v1beta.types.CountTextTokensResponse: + A response from CountTextTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a text_service.CountTextTokensRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, text_service.CountTextTokensRequest): + request = text_service.CountTextTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_text_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TextServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/__init__.py new file mode 100644 index 000000000000..63721cb6cb66 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TextServiceTransport +from .grpc import TextServiceGrpcTransport +from .grpc_asyncio import TextServiceGrpcAsyncIOTransport +from .rest import TextServiceRestInterceptor, TextServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TextServiceTransport]] +_transport_registry["grpc"] = TextServiceGrpcTransport +_transport_registry["grpc_asyncio"] = TextServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TextServiceRestTransport + +__all__ = ( + "TextServiceTransport", + "TextServiceGrpcTransport", + "TextServiceGrpcAsyncIOTransport", + "TextServiceRestTransport", + "TextServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/base.py new file mode 100644 index 000000000000..710f1899c143 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/base.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from google.ai.generativelanguage_v1beta.types import text_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TextServiceTransport(abc.ABC): + """Abstract transport class for TextService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.generate_text: gapic_v1.method.wrap_method( + self.generate_text, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.embed_text: gapic_v1.method.wrap_method( + self.embed_text, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.batch_embed_text: gapic_v1.method.wrap_method( + self.batch_embed_text, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.count_text_tokens: gapic_v1.method.wrap_method( + self.count_text_tokens, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], + Union[ + text_service.GenerateTextResponse, + Awaitable[text_service.GenerateTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def embed_text( + self, + ) -> Callable[ + [text_service.EmbedTextRequest], + Union[ + text_service.EmbedTextResponse, Awaitable[text_service.EmbedTextResponse] + ], + ]: + raise NotImplementedError() + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], + Union[ + text_service.BatchEmbedTextResponse, + Awaitable[text_service.BatchEmbedTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], + Union[ + text_service.CountTextTokensResponse, + Awaitable[text_service.CountTextTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TextServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/grpc.py new file mode 100644 index 000000000000..fd714eca100e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/grpc.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import text_service + +from .base import DEFAULT_CLIENT_INFO, TextServiceTransport + + +class TextServiceGrpcTransport(TextServiceTransport): + """gRPC backend transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], text_service.GenerateTextResponse + ]: + r"""Return a callable for the generate text method over gRPC. + + Generates a response from the model given an input + message. + + Returns: + Callable[[~.GenerateTextRequest], + ~.GenerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_text" not in self._stubs: + self._stubs["generate_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/GenerateText", + request_serializer=text_service.GenerateTextRequest.serialize, + response_deserializer=text_service.GenerateTextResponse.deserialize, + ) + return self._stubs["generate_text"] + + @property + def embed_text( + self, + ) -> Callable[[text_service.EmbedTextRequest], text_service.EmbedTextResponse]: + r"""Return a callable for the embed text method over gRPC. + + Generates an embedding from the model given an input + message. + + Returns: + Callable[[~.EmbedTextRequest], + ~.EmbedTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_text" not in self._stubs: + self._stubs["embed_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/EmbedText", + request_serializer=text_service.EmbedTextRequest.serialize, + response_deserializer=text_service.EmbedTextResponse.deserialize, + ) + return self._stubs["embed_text"] + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], text_service.BatchEmbedTextResponse + ]: + r"""Return a callable for the batch embed text method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. + + Returns: + Callable[[~.BatchEmbedTextRequest], + ~.BatchEmbedTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_embed_text" not in self._stubs: + self._stubs["batch_embed_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/BatchEmbedText", + request_serializer=text_service.BatchEmbedTextRequest.serialize, + response_deserializer=text_service.BatchEmbedTextResponse.deserialize, + ) + return self._stubs["batch_embed_text"] + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], text_service.CountTextTokensResponse + ]: + r"""Return a callable for the count text tokens method over gRPC. + + Runs a model's tokenizer on a text and returns the + token count. + + Returns: + Callable[[~.CountTextTokensRequest], + ~.CountTextTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_text_tokens" not in self._stubs: + self._stubs["count_text_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/CountTextTokens", + request_serializer=text_service.CountTextTokensRequest.serialize, + response_deserializer=text_service.CountTextTokensResponse.deserialize, + ) + return self._stubs["count_text_tokens"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..bb9ad8b7c2b6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/grpc_asyncio.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import text_service + +from .base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .grpc import TextServiceGrpcTransport + + +class TextServiceGrpcAsyncIOTransport(TextServiceTransport): + """gRPC AsyncIO backend transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], Awaitable[text_service.GenerateTextResponse] + ]: + r"""Return a callable for the generate text method over gRPC. + + Generates a response from the model given an input + message. 
+ + Returns: + Callable[[~.GenerateTextRequest], + Awaitable[~.GenerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_text" not in self._stubs: + self._stubs["generate_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/GenerateText", + request_serializer=text_service.GenerateTextRequest.serialize, + response_deserializer=text_service.GenerateTextResponse.deserialize, + ) + return self._stubs["generate_text"] + + @property + def embed_text( + self, + ) -> Callable[ + [text_service.EmbedTextRequest], Awaitable[text_service.EmbedTextResponse] + ]: + r"""Return a callable for the embed text method over gRPC. + + Generates an embedding from the model given an input + message. + + Returns: + Callable[[~.EmbedTextRequest], + Awaitable[~.EmbedTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_text" not in self._stubs: + self._stubs["embed_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/EmbedText", + request_serializer=text_service.EmbedTextRequest.serialize, + response_deserializer=text_service.EmbedTextResponse.deserialize, + ) + return self._stubs["embed_text"] + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], + Awaitable[text_service.BatchEmbedTextResponse], + ]: + r"""Return a callable for the batch embed text method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. 
+ + Returns: + Callable[[~.BatchEmbedTextRequest], + Awaitable[~.BatchEmbedTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_text" not in self._stubs: + self._stubs["batch_embed_text"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/BatchEmbedText", + request_serializer=text_service.BatchEmbedTextRequest.serialize, + response_deserializer=text_service.BatchEmbedTextResponse.deserialize, + ) + return self._stubs["batch_embed_text"] + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], + Awaitable[text_service.CountTextTokensResponse], + ]: + r"""Return a callable for the count text tokens method over gRPC. + + Runs a model's tokenizer on a text and returns the + token count. + + Returns: + Callable[[~.CountTextTokensRequest], + Awaitable[~.CountTextTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_text_tokens" not in self._stubs: + self._stubs["count_text_tokens"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.TextService/CountTextTokens", + request_serializer=text_service.CountTextTokensRequest.serialize, + response_deserializer=text_service.CountTextTokensResponse.deserialize, + ) + return self._stubs["count_text_tokens"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TextServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py new file mode 100644 index 000000000000..c105ca95dc41 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py @@ -0,0 +1,746 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import text_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TextServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TextServiceRestInterceptor: + """Interceptor for TextService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TextServiceRestTransport. + + .. 
code-block:: python + class MyCustomTextServiceInterceptor(TextServiceRestInterceptor): + def pre_batch_embed_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_embed_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_count_text_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_text_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_embed_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_embed_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TextServiceRestTransport(interceptor=MyCustomTextServiceInterceptor()) + client = TextServiceClient(transport=transport) + + + """ + + def pre_batch_embed_text( + self, + request: text_service.BatchEmbedTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[text_service.BatchEmbedTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_embed_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_batch_embed_text( + self, response: text_service.BatchEmbedTextResponse + ) -> text_service.BatchEmbedTextResponse: + """Post-rpc interceptor for batch_embed_text + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. 
+ """ + return response + + def pre_count_text_tokens( + self, + request: text_service.CountTextTokensRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[text_service.CountTextTokensRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for count_text_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_count_text_tokens( + self, response: text_service.CountTextTokensResponse + ) -> text_service.CountTextTokensResponse: + """Post-rpc interceptor for count_text_tokens + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. + """ + return response + + def pre_embed_text( + self, + request: text_service.EmbedTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[text_service.EmbedTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for embed_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_embed_text( + self, response: text_service.EmbedTextResponse + ) -> text_service.EmbedTextResponse: + """Post-rpc interceptor for embed_text + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. + """ + return response + + def pre_generate_text( + self, + request: text_service.GenerateTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[text_service.GenerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. 
+ """ + return request, metadata + + def post_generate_text( + self, response: text_service.GenerateTextResponse + ) -> text_service.GenerateTextResponse: + """Post-rpc interceptor for generate_text + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TextServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TextServiceRestInterceptor + + +class TextServiceRestTransport(TextServiceTransport): + """REST backend transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TextServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TextServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchEmbedText(TextServiceRestStub): + def __hash__(self): + return hash("BatchEmbedText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: text_service.BatchEmbedTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.BatchEmbedTextResponse: + r"""Call the batch embed text method over HTTP. + + Args: + request (~.text_service.BatchEmbedTextRequest): + The request object. Batch request to get a text embedding + from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.text_service.BatchEmbedTextResponse: + The response to a EmbedTextRequest. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:batchEmbedText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_embed_text( + request, metadata + ) + pb_request = text_service.BatchEmbedTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.BatchEmbedTextResponse() + pb_resp = text_service.BatchEmbedTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_embed_text(resp) + return resp + + class _CountTextTokens(TextServiceRestStub): + def __hash__(self): + return hash("CountTextTokens") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: text_service.CountTextTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.CountTextTokensResponse: + r"""Call the count text tokens method over HTTP. + + Args: + request (~.text_service.CountTextTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.text_service.CountTextTokensResponse: + A response from ``CountTextTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:countTextTokens", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_count_text_tokens( + request, metadata + ) + pb_request = text_service.CountTextTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.CountTextTokensResponse() + pb_resp = text_service.CountTextTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_count_text_tokens(resp) + return resp + + class _EmbedText(TextServiceRestStub): + def __hash__(self): + return hash("EmbedText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: text_service.EmbedTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.EmbedTextResponse: + r"""Call the embed text method over HTTP. + + Args: + request (~.text_service.EmbedTextRequest): + The request object. Request to get a text embedding from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.text_service.EmbedTextResponse: + The response to a EmbedTextRequest. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:embedText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_embed_text(request, metadata) + pb_request = text_service.EmbedTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.EmbedTextResponse() + pb_resp = text_service.EmbedTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_embed_text(resp) + return resp + + class _GenerateText(TextServiceRestStub): + def __hash__(self): + return hash("GenerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: text_service.GenerateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> text_service.GenerateTextResponse: + r"""Call the generate text method over HTTP. + + Args: + request (~.text_service.GenerateTextRequest): + The request object. Request to generate a text completion + response from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.text_service.GenerateTextResponse: + The response from the model, + including candidate completions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:generateText", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{model=tunedModels/*}:generateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_text(request, metadata) + pb_request = text_service.GenerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.GenerateTextResponse() + pb_resp = text_service.GenerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_text(resp) + return resp + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], text_service.BatchEmbedTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchEmbedText(self._session, self._host, self._interceptor) # type: ignore + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], text_service.CountTextTokensResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountTextTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def embed_text( + self, + ) -> Callable[[text_service.EmbedTextRequest], text_service.EmbedTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EmbedText(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], text_service.GenerateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GenerateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TextServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py new file mode 100644 index 000000000000..a040a0d8eb4d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .citation import CitationMetadata, CitationSource +from .content import ( + Blob, + Content, + FunctionCall, + FunctionDeclaration, + FunctionResponse, + GroundingPassage, + GroundingPassages, + Part, + Schema, + Tool, + Type, +) +from .discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from .generative_service import ( + AttributionSourceId, + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + EmbedContentResponse, + GenerateAnswerRequest, + GenerateAnswerResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + GroundingAttribution, + SemanticRetrieverConfig, + TaskType, +) +from .model import Model +from .model_service import ( + CreateTunedModelMetadata, + CreateTunedModelRequest, + DeleteTunedModelRequest, + GetModelRequest, + GetTunedModelRequest, + ListModelsRequest, + ListModelsResponse, + ListTunedModelsRequest, + ListTunedModelsResponse, + UpdateTunedModelRequest, +) +from .permission import Permission +from .permission_service import ( + CreatePermissionRequest, + DeletePermissionRequest, + GetPermissionRequest, + ListPermissionsRequest, + ListPermissionsResponse, + TransferOwnershipRequest, + TransferOwnershipResponse, + UpdatePermissionRequest, +) +from .retriever import ( + Chunk, + ChunkData, + Condition, + Corpus, + CustomMetadata, + Document, + MetadataFilter, + StringList, +) +from .retriever_service import ( + BatchCreateChunksRequest, + BatchCreateChunksResponse, + BatchDeleteChunksRequest, + BatchUpdateChunksRequest, + BatchUpdateChunksResponse, + CreateChunkRequest, + CreateCorpusRequest, + CreateDocumentRequest, + DeleteChunkRequest, + DeleteCorpusRequest, + DeleteDocumentRequest, + GetChunkRequest, + GetCorpusRequest, + GetDocumentRequest, + ListChunksRequest, + 
ListChunksResponse, + ListCorporaRequest, + ListCorporaResponse, + ListDocumentsRequest, + ListDocumentsResponse, + QueryCorpusRequest, + QueryCorpusResponse, + QueryDocumentRequest, + QueryDocumentResponse, + RelevantChunk, + UpdateChunkRequest, + UpdateCorpusRequest, + UpdateDocumentRequest, +) +from .safety import ( + ContentFilter, + HarmCategory, + SafetyFeedback, + SafetyRating, + SafetySetting, +) +from .text_service import ( + BatchEmbedTextRequest, + BatchEmbedTextResponse, + CountTextTokensRequest, + CountTextTokensResponse, + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) +from .tuned_model import ( + Dataset, + Hyperparameters, + TunedModel, + TunedModelSource, + TuningExample, + TuningExamples, + TuningSnapshot, + TuningTask, +) + +__all__ = ( + "CitationMetadata", + "CitationSource", + "Blob", + "Content", + "FunctionCall", + "FunctionDeclaration", + "FunctionResponse", + "GroundingPassage", + "GroundingPassages", + "Part", + "Schema", + "Tool", + "Type", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "Example", + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "AttributionSourceId", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "Candidate", + "ContentEmbedding", + "CountTokensRequest", + "CountTokensResponse", + "EmbedContentRequest", + "EmbedContentResponse", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerationConfig", + "GroundingAttribution", + "SemanticRetrieverConfig", + "TaskType", + "Model", + "CreateTunedModelMetadata", + "CreateTunedModelRequest", + "DeleteTunedModelRequest", + "GetModelRequest", + "GetTunedModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "ListTunedModelsRequest", + "ListTunedModelsResponse", + "UpdateTunedModelRequest", + "Permission", + "CreatePermissionRequest", + 
"DeletePermissionRequest", + "GetPermissionRequest", + "ListPermissionsRequest", + "ListPermissionsResponse", + "TransferOwnershipRequest", + "TransferOwnershipResponse", + "UpdatePermissionRequest", + "Chunk", + "ChunkData", + "Condition", + "Corpus", + "CustomMetadata", + "Document", + "MetadataFilter", + "StringList", + "BatchCreateChunksRequest", + "BatchCreateChunksResponse", + "BatchDeleteChunksRequest", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "CreateChunkRequest", + "CreateCorpusRequest", + "CreateDocumentRequest", + "DeleteChunkRequest", + "DeleteCorpusRequest", + "DeleteDocumentRequest", + "GetChunkRequest", + "GetCorpusRequest", + "GetDocumentRequest", + "ListChunksRequest", + "ListChunksResponse", + "ListCorporaRequest", + "ListCorporaResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", + "QueryCorpusRequest", + "QueryCorpusResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "RelevantChunk", + "UpdateChunkRequest", + "UpdateCorpusRequest", + "UpdateDocumentRequest", + "ContentFilter", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "HarmCategory", + "BatchEmbedTextRequest", + "BatchEmbedTextResponse", + "CountTextTokensRequest", + "CountTextTokensResponse", + "Embedding", + "EmbedTextRequest", + "EmbedTextResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "TextCompletion", + "TextPrompt", + "Dataset", + "Hyperparameters", + "TunedModel", + "TunedModelSource", + "TuningExample", + "TuningExamples", + "TuningSnapshot", + "TuningTask", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/citation.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/citation.py new file mode 100644 index 000000000000..cbeb43c1edbd --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/citation.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "CitationMetadata", + "CitationSource", + }, +) + + +class CitationMetadata(proto.Message): + r"""A collection of source attributions for a piece of content. + + Attributes: + citation_sources (MutableSequence[google.ai.generativelanguage_v1beta.types.CitationSource]): + Citations to sources for a specific response. + """ + + citation_sources: MutableSequence["CitationSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CitationSource", + ) + + +class CitationSource(proto.Message): + r"""A citation to a source for a portion of a specific response. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_index (int): + Optional. Start of segment of the response + that is attributed to this source. + + Index indicates the start of the segment, + measured in bytes. + + This field is a member of `oneof`_ ``_start_index``. + end_index (int): + Optional. End of the attributed segment, + exclusive. + + This field is a member of `oneof`_ ``_end_index``. + uri (str): + Optional. URI that is attributed as a source + for a portion of the text. + + This field is a member of `oneof`_ ``_uri``. + license_ (str): + Optional. 
License for the GitHub project that + is attributed as a source for segment. + + License info is required for code citations. + + This field is a member of `oneof`_ ``_license``. + """ + + start_index: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + end_index: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + license_: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py new file mode 100644 index 000000000000..b77ec51e7265 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -0,0 +1,449 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "Type", + "Content", + "Part", + "Blob", + "Tool", + "FunctionDeclaration", + "FunctionCall", + "FunctionResponse", + "Schema", + "GroundingPassage", + "GroundingPassages", + }, +) + + +class Type(proto.Enum): + r"""Type contains the list of OpenAPI data types as defined by + https://spec.openapis.org/oas/v3.0.3#data-types + + Values: + TYPE_UNSPECIFIED (0): + Not specified, should not be used. + STRING (1): + String type. + NUMBER (2): + Number type. + INTEGER (3): + Integer type. + BOOLEAN (4): + Boolean type. + ARRAY (5): + Array type. + OBJECT (6): + Object type. + """ + TYPE_UNSPECIFIED = 0 + STRING = 1 + NUMBER = 2 + INTEGER = 3 + BOOLEAN = 4 + ARRAY = 5 + OBJECT = 6 + + +class Content(proto.Message): + r"""The base structured datatype containing multi-part content of a + message. + + A ``Content`` includes a ``role`` field designating the producer of + the ``Content`` and a ``parts`` field containing multi-part data + that contains the content of the message turn. + + Attributes: + parts (MutableSequence[google.ai.generativelanguage_v1beta.types.Part]): + Ordered ``Parts`` that constitute a single message. Parts + may have different MIME types. + role (str): + Optional. The producer of the content. Must + be either 'user' or 'model'. + Useful to set for multi-turn conversations, + otherwise can be left blank or unset. + """ + + parts: MutableSequence["Part"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Part", + ) + role: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Part(proto.Message): + r"""A datatype containing media that is part of a multi-part ``Content`` + message. + + A ``Part`` consists of data which has an associated datatype. 
A + ``Part`` can only contain one of the accepted types in + ``Part.data``. + + A ``Part`` must have a fixed IANA MIME type identifying the type and + subtype of the media if the ``inline_data`` field is filled with raw + bytes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + Inline text. + + This field is a member of `oneof`_ ``data``. + inline_data (google.ai.generativelanguage_v1beta.types.Blob): + Inline media bytes. + + This field is a member of `oneof`_ ``data``. + function_call (google.ai.generativelanguage_v1beta.types.FunctionCall): + A predicted ``FunctionCall`` returned from the model that + contains a string representing the + ``FunctionDeclaration.name`` with the arguments and their + values. + + This field is a member of `oneof`_ ``data``. + function_response (google.ai.generativelanguage_v1beta.types.FunctionResponse): + The result output of a ``FunctionCall`` that contains a + string representing the ``FunctionDeclaration.name`` and a + structured JSON object containing any output from the + function is used as context to the model. + + This field is a member of `oneof`_ ``data``. + """ + + text: str = proto.Field( + proto.STRING, + number=2, + oneof="data", + ) + inline_data: "Blob" = proto.Field( + proto.MESSAGE, + number=3, + oneof="data", + message="Blob", + ) + function_call: "FunctionCall" = proto.Field( + proto.MESSAGE, + number=4, + oneof="data", + message="FunctionCall", + ) + function_response: "FunctionResponse" = proto.Field( + proto.MESSAGE, + number=5, + oneof="data", + message="FunctionResponse", + ) + + +class Blob(proto.Message): + r"""Raw media bytes. + + Text should not be sent as raw bytes, use the 'text' field. 
+ + Attributes: + mime_type (str): + The IANA standard MIME type of the source + data. Accepted types include: "image/png", + "image/jpeg", "image/heic", "image/heif", + "image/webp". + data (bytes): + Raw bytes for media formats. + """ + + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class Tool(proto.Message): + r"""Tool details that the model may use to generate response. + + A ``Tool`` is a piece of code that enables the system to interact + with external systems to perform an action, or set of actions, + outside of knowledge and scope of the model. + + Attributes: + function_declarations (MutableSequence[google.ai.generativelanguage_v1beta.types.FunctionDeclaration]): + Optional. A list of ``FunctionDeclarations`` available to + the model that can be used for function calling. + + The model or system does not execute the function. Instead + the defined function may be returned as a + [FunctionCall][content.part.function_call] with arguments to + the client side for execution. The model may decide to call + a subset of these functions by populating + [FunctionCall][content.part.function_call] in the response. + The next conversation turn may contain a + [FunctionResponse][content.part.function_response] with the + [content.role] "function" generation context for the next + model turn. + """ + + function_declarations: MutableSequence["FunctionDeclaration"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FunctionDeclaration", + ) + + +class FunctionDeclaration(proto.Message): + r"""Structured representation of a function declaration as defined by + the `OpenAPI 3.03 + specification `__. Included in + this declaration are the function name and parameters. This + FunctionDeclaration is a representation of a block of code that can + be used as a ``Tool`` by the model and executed by the client. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the function. + Must be a-z, A-Z, 0-9, or contain underscores + and dashes, with a maximum length of 63. + description (str): + Required. A brief description of the + function. + parameters (google.ai.generativelanguage_v1beta.types.Schema): + Optional. Describes the parameters to this + function. Reflects the Open API 3.03 Parameter + Object string Key: the name of the parameter. + Parameter names are case sensitive. Schema + Value: the Schema defining the type used for the + parameter. + + This field is a member of `oneof`_ ``_parameters``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + parameters: "Schema" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Schema", + ) + + +class FunctionCall(proto.Message): + r"""A predicted ``FunctionCall`` returned from the model that contains a + string representing the ``FunctionDeclaration.name`` with the + arguments and their values. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the function to call. + Must be a-z, A-Z, 0-9, or contain underscores + and dashes, with a maximum length of 63. + args (google.protobuf.struct_pb2.Struct): + Optional. The function parameters and values + in JSON object format. + + This field is a member of `oneof`_ ``_args``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + args: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=struct_pb2.Struct, + ) + + +class FunctionResponse(proto.Message): + r"""The result output from a ``FunctionCall`` that contains a string + representing the ``FunctionDeclaration.name`` and a structured JSON + object containing any output from the function is used as context to + the model. This should contain the result of a\ ``FunctionCall`` + made based on model prediction. + + Attributes: + name (str): + Required. The name of the function to call. + Must be a-z, A-Z, 0-9, or contain underscores + and dashes, with a maximum length of 63. + response (google.protobuf.struct_pb2.Struct): + Required. The function response in JSON + object format. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + response: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class Schema(proto.Message): + r"""The ``Schema`` object allows the definition of input and output data + types. These types can be objects, but also primitives and arrays. + Represents a select subset of an `OpenAPI 3.0 schema + object `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.ai.generativelanguage_v1beta.types.Type): + Required. Data type. + format_ (str): + Optional. The format of the data. This is + used only for primitive datatypes. Supported + formats: + + for NUMBER type: float, double + for INTEGER type: int32, int64 + description (str): + Optional. A brief description of the + parameter. This could contain examples of use. + Parameter description may be formatted as + Markdown. + nullable (bool): + Optional. Indicates if the value may be null. + enum (MutableSequence[str]): + Optional. Possible values of the element of Type.STRING with + enum format. 
For example we can define an Enum Direction as + : {type:STRING, format:enum, enum:["EAST", NORTH", "SOUTH", + "WEST"]} + items (google.ai.generativelanguage_v1beta.types.Schema): + Optional. Schema of the elements of + Type.ARRAY. + + This field is a member of `oneof`_ ``_items``. + properties (MutableMapping[str, google.ai.generativelanguage_v1beta.types.Schema]): + Optional. Properties of Type.OBJECT. + required (MutableSequence[str]): + Optional. Required properties of Type.OBJECT. + """ + + type_: "Type" = proto.Field( + proto.ENUM, + number=1, + enum="Type", + ) + format_: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + nullable: bool = proto.Field( + proto.BOOL, + number=4, + ) + enum: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + items: "Schema" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="Schema", + ) + properties: MutableMapping[str, "Schema"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=7, + message="Schema", + ) + required: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class GroundingPassage(proto.Message): + r"""Passage included inline with a grounding configuration. + + Attributes: + id (str): + Identifier for the passage for attributing + this passage in grounded answers. + content (google.ai.generativelanguage_v1beta.types.Content): + Content of the passage. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + content: "Content" = proto.Field( + proto.MESSAGE, + number=2, + message="Content", + ) + + +class GroundingPassages(proto.Message): + r"""A repeated list of passages. + + Attributes: + passages (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingPassage]): + List of passages. 
+ """ + + passages: MutableSequence["GroundingPassage"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GroundingPassage", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/discuss_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/discuss_service.py new file mode 100644 index 000000000000..18ba818aaf49 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/discuss_service.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import citation, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "Example", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + }, +) + + +class GenerateMessageRequest(proto.Message): + r"""Request to generate a message response from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the model to use. 
+ + Format: ``name=models/{model}``. + prompt (google.ai.generativelanguage_v1beta.types.MessagePrompt): + Required. The structured textual input given + to the model as a prompt. + Given a + prompt, the model will return what it predicts + is the next message in the discussion. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + candidate_count (int): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If unset, + this will default to ``1``. + + This field is a member of `oneof`_ ``_candidate_count``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. + + This field is a member of `oneof`_ ``_top_k``. 
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "MessagePrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="MessagePrompt", + ) + temperature: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + candidate_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=6, + optional=True, + ) + + +class GenerateMessageResponse(proto.Message): + r"""The response from the model. + + This includes candidate messages and + conversation history in the form of chronologically-ordered + messages. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.Message]): + Candidate response messages from the model. + messages (MutableSequence[google.ai.generativelanguage_v1beta.types.Message]): + The conversation history used by the model. + filters (MutableSequence[google.ai.generativelanguage_v1beta.types.ContentFilter]): + A set of content filtering metadata for the prompt and + response text. + + This indicates which ``SafetyCategory``\ (s) blocked a + candidate from this response, the lowest ``HarmProbability`` + that triggered a block, and the HarmThreshold setting for + that category. + """ + + candidates: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Message", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Message", + ) + filters: MutableSequence[safety.ContentFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.ContentFilter, + ) + + +class Message(proto.Message): + r"""The base unit of structured text. + + A ``Message`` includes an ``author`` and the ``content`` of the + ``Message``. + + The ``author`` is used to tag messages when they are fed to the + model as text. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + author (str): + Optional. The author of this Message. + + This serves as a key for tagging + the content of this Message when it is fed to + the model as text. + + The author can be any alphanumeric string. + content (str): + Required. The text content of the structured ``Message``. + citation_metadata (google.ai.generativelanguage_v1beta.types.CitationMetadata): + Output only. Citation information for model-generated + ``content`` in this ``Message``. + + If this ``Message`` was generated as output from the model, + this field may be populated with attribution information for + any text included in the ``content``. This field is used + only on output. + + This field is a member of `oneof`_ ``_citation_metadata``. + """ + + author: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=citation.CitationMetadata, + ) + + +class MessagePrompt(proto.Message): + r"""All of the structured input text passed to the model as a prompt. + + A ``MessagePrompt`` contains a structured set of fields that provide + context for the conversation, examples of user input/model output + message pairs that prime the model to respond in different ways, and + the conversation history or list of messages representing the + alternating turns of the conversation between the user and the + model. + + Attributes: + context (str): + Optional. Text that should be provided to the model first to + ground the response. + + If not empty, this ``context`` will be given to the model + first before the ``examples`` and ``messages``. When using a + ``context`` be sure to provide it with every request to + maintain continuity. 
+ + This field can be a description of your prompt to the model + to help provide context and guide the responses. Examples: + "Translate the phrase from English to French." or "Given a + statement, classify the sentiment as happy, sad or neutral." + + Anything included in this field will take precedence over + message history if the total input size exceeds the model's + ``input_token_limit`` and the input request is truncated. + examples (MutableSequence[google.ai.generativelanguage_v1beta.types.Example]): + Optional. Examples of what the model should generate. + + This includes both user input and the response that the + model should emulate. + + These ``examples`` are treated identically to conversation + messages except that they take precedence over the history + in ``messages``: If the total input size exceeds the model's + ``input_token_limit`` the input will be truncated. Items + will be dropped from ``messages`` before ``examples``. + messages (MutableSequence[google.ai.generativelanguage_v1beta.types.Message]): + Required. A snapshot of the recent conversation history + sorted chronologically. + + Turns alternate between two authors. + + If the total input size exceeds the model's + ``input_token_limit`` the input will be truncated: The + oldest items will be dropped from ``messages``. + """ + + context: str = proto.Field( + proto.STRING, + number=1, + ) + examples: MutableSequence["Example"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Example", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Message", + ) + + +class Example(proto.Message): + r"""An input/output example used to instruct the Model. + + It demonstrates how the model should respond or format its + response. + + Attributes: + input (google.ai.generativelanguage_v1beta.types.Message): + Required. An example of an input ``Message`` from the user. 
+ output (google.ai.generativelanguage_v1beta.types.Message): + Required. An example of what the model should + output given the input. + """ + + input: "Message" = proto.Field( + proto.MESSAGE, + number=1, + message="Message", + ) + output: "Message" = proto.Field( + proto.MESSAGE, + number=2, + message="Message", + ) + + +class CountMessageTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + prompt (google.ai.generativelanguage_v1beta.types.MessagePrompt): + Required. The prompt, whose token count is to + be returned. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "MessagePrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="MessagePrompt", + ) + + +class CountMessageTokensResponse(proto.Message): + r"""A response from ``CountMessageTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + token_count (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. 
+ """ + + token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py new file mode 100644 index 000000000000..89f5cdead12e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -0,0 +1,1028 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import citation +from google.ai.generativelanguage_v1beta.types import content as gag_content +from google.ai.generativelanguage_v1beta.types import retriever, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "TaskType", + "GenerateContentRequest", + "GenerationConfig", + "SemanticRetrieverConfig", + "GenerateContentResponse", + "Candidate", + "AttributionSourceId", + "GroundingAttribution", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "EmbedContentRequest", + "ContentEmbedding", + "EmbedContentResponse", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "CountTokensRequest", + "CountTokensResponse", + }, +) + + +class TaskType(proto.Enum): + r"""Type of task for which the embedding will be used. + + Values: + TASK_TYPE_UNSPECIFIED (0): + Unset value, which will default to one of the + other enum values. + RETRIEVAL_QUERY (1): + Specifies the given text is a query in a + search/retrieval setting. + RETRIEVAL_DOCUMENT (2): + Specifies the given text is a document from + the corpus being searched. + SEMANTIC_SIMILARITY (3): + Specifies the given text will be used for + STS. + CLASSIFICATION (4): + Specifies that the given text will be + classified. + CLUSTERING (5): + Specifies that the embeddings will be used + for clustering. + """ + TASK_TYPE_UNSPECIFIED = 0 + RETRIEVAL_QUERY = 1 + RETRIEVAL_DOCUMENT = 2 + SEMANTIC_SIMILARITY = 3 + CLASSIFICATION = 4 + CLUSTERING = 5 + + +class GenerateContentRequest(proto.Message): + r"""Request to generate a completion from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the ``Model`` to use for generating + the completion. 
+ + Format: ``name=models/{model}``. + contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. The content of the current + conversation with the model. + For single-turn queries, this is a single + instance. For multi-turn queries, this is a + repeated field that contains conversation + history + latest request. + tools (MutableSequence[google.ai.generativelanguage_v1beta.types.Tool]): + Optional. A list of ``Tools`` the model may use to generate + the next response. + + A ``Tool`` is a piece of code that enables the system to + interact with external systems to perform an action, or set + of actions, outside of knowledge and scope of the model. The + only supported tool is currently ``Function``. + safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances for + blocking unsafe content. + + This will be enforced on the + ``GenerateContentRequest.contents`` and + ``GenerateContentResponse.candidates``. There should not be + more than one setting for each ``SafetyCategory`` type. The + API will block any contents and responses that fail to meet + the thresholds set by these settings. This list overrides + the default settings for each ``SafetyCategory`` specified + in the safety_settings. If there is no ``SafetySetting`` for + a given ``SafetyCategory`` provided in the list, the API + will use the default safety setting for that category. Harm + categories HARM_CATEGORY_HATE_SPEECH, + HARM_CATEGORY_SEXUALLY_EXPLICIT, + HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT + are supported. + generation_config (google.ai.generativelanguage_v1beta.types.GenerationConfig): + Optional. Configuration options for model + generation and outputs. + + This field is a member of `oneof`_ ``_generation_config``. 
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + tools: MutableSequence[gag_content.Tool] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=gag_content.Tool, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.SafetySetting, + ) + generation_config: "GenerationConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="GenerationConfig", + ) + + +class GenerationConfig(proto.Message): + r"""Configuration options for model generation and outputs. Not + all parameters may be configurable for every model. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + candidate_count (int): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, this + will default to 1. + + This field is a member of `oneof`_ ``_candidate_count``. + stop_sequences (MutableSequence[str]): + Optional. The set of character sequences (up + to 5) that will stop output generation. If + specified, the API will stop at the first + appearance of a stop sequence. The stop sequence + will not be included as part of the response. + max_output_tokens (int): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit specified + in the ``Model`` specification. + + This field is a member of `oneof`_ ``_max_output_tokens``. + temperature (float): + Optional. Controls the randomness of the output. Note: The + default value varies by model, see the ``Model.temperature`` + attribute of the ``Model`` returned the ``getModel`` + function. + + Values can range from [0.0,1.0], inclusive. 
A value closer + to 1.0 will produce responses that are more varied and + creative, while a value closer to 0.0 will typically result + in more straightforward responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities so + that only the most likely tokens are considered. Top-k + sampling directly limits the maximum number of tokens to + consider, while Nucleus sampling limits number of tokens + based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_k``. + """ + + candidate_count: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + stop_sequences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + max_output_tokens: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + + +class SemanticRetrieverConfig(proto.Message): + r"""Configuration for retrieving grounding content from a ``Corpus`` or + ``Document`` created using the Semantic Retriever API. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source (str): + Required. Name of the resource for retrieval, + e.g. corpora/123 or corpora/123/documents/abc. + query (google.ai.generativelanguage_v1beta.types.Content): + Required. Query to use for similarity matching ``Chunk``\ s + in the given resource. + metadata_filters (MutableSequence[google.ai.generativelanguage_v1beta.types.MetadataFilter]): + Optional. Filters for selecting ``Document``\ s and/or + ``Chunk``\ s from the resource. + max_chunks_count (int): + Optional. Maximum number of relevant ``Chunk``\ s to + retrieve. + + This field is a member of `oneof`_ ``_max_chunks_count``. + minimum_relevance_score (float): + Optional. Minimum relevance score for retrieved relevant + ``Chunk``\ s. + + This field is a member of `oneof`_ ``_minimum_relevance_score``. + """ + + source: str = proto.Field( + proto.STRING, + number=1, + ) + query: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + metadata_filters: MutableSequence[retriever.MetadataFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=retriever.MetadataFilter, + ) + max_chunks_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + minimum_relevance_score: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + + +class GenerateContentResponse(proto.Message): + r"""Response from the model supporting multiple candidates. + + Note on safety ratings and content filtering. They are reported for + both prompt in ``GenerateContentResponse.prompt_feedback`` and for + each candidate in ``finish_reason`` and in ``safety_ratings``. 
The + API contract is that: + + - either all requested candidates are returned or no candidates at + all + - no candidates are returned only if there was something wrong with + the prompt (see ``prompt_feedback``) + - feedback on each candidate is reported on ``finish_reason`` and + ``safety_ratings``. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.Candidate]): + Candidate responses from the model. + prompt_feedback (google.ai.generativelanguage_v1beta.types.GenerateContentResponse.PromptFeedback): + Returns the prompt's feedback related to the + content filters. + """ + + class PromptFeedback(proto.Message): + r"""A set of the feedback metadata the prompt specified in + ``GenerateContentRequest.content``. + + Attributes: + block_reason (google.ai.generativelanguage_v1beta.types.GenerateContentResponse.PromptFeedback.BlockReason): + Optional. If set, the prompt was blocked and + no candidates are returned. Rephrase your + prompt. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): + Ratings for safety of the prompt. + There is at most one rating per category. + """ + + class BlockReason(proto.Enum): + r"""Specifies what was the reason why prompt was blocked. + + Values: + BLOCK_REASON_UNSPECIFIED (0): + Default value. This value is unused. + SAFETY (1): + Prompt was blocked due to safety reasons. You can inspect + ``safety_ratings`` to understand which safety category + blocked it. + OTHER (2): + Prompt was blocked due to unknown reaasons. 
+ """ + BLOCK_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( + proto.Field( + proto.ENUM, + number=1, + enum="GenerateContentResponse.PromptFeedback.BlockReason", + ) + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + + candidates: MutableSequence["Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Candidate", + ) + prompt_feedback: PromptFeedback = proto.Field( + proto.MESSAGE, + number=2, + message=PromptFeedback, + ) + + +class Candidate(proto.Message): + r"""A response candidate generated from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + index (int): + Output only. Index of the candidate in the + list of candidates. + + This field is a member of `oneof`_ ``_index``. + content (google.ai.generativelanguage_v1beta.types.Content): + Output only. Generated content returned from + the model. + finish_reason (google.ai.generativelanguage_v1beta.types.Candidate.FinishReason): + Optional. Output only. The reason why the + model stopped generating tokens. + If empty, the model has not stopped generating + the tokens. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): + List of ratings for the safety of a response + candidate. + There is at most one rating per category. + citation_metadata (google.ai.generativelanguage_v1beta.types.CitationMetadata): + Output only. Citation information for model-generated + candidate. + + This field may be populated with recitation information for + any text included in the ``content``. These are passages + that are "recited" from copyrighted material in the + foundational LLM's training data. + token_count (int): + Output only. Token count for this candidate. 
+ grounding_attributions (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingAttribution]): + Output only. Attribution information for sources that + contributed to a grounded answer. + + This field is populated for ``GenerateAnswer`` calls. + """ + + class FinishReason(proto.Enum): + r"""Defines the reason why the model stopped generating tokens. + + Values: + FINISH_REASON_UNSPECIFIED (0): + Default value. This value is unused. + STOP (1): + Natural stop point of the model or provided + stop sequence. + MAX_TOKENS (2): + The maximum number of tokens as specified in + the request was reached. + SAFETY (3): + The candidate content was flagged for safety + reasons. + RECITATION (4): + The candidate content was flagged for + recitation reasons. + OTHER (5): + Unknown reason. + """ + FINISH_REASON_UNSPECIFIED = 0 + STOP = 1 + MAX_TOKENS = 2 + SAFETY = 3 + RECITATION = 4 + OTHER = 5 + + index: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=1, + message=gag_content.Content, + ) + finish_reason: FinishReason = proto.Field( + proto.ENUM, + number=2, + enum=FinishReason, + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=safety.SafetyRating, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=6, + message=citation.CitationMetadata, + ) + token_count: int = proto.Field( + proto.INT32, + number=7, + ) + grounding_attributions: MutableSequence[ + "GroundingAttribution" + ] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="GroundingAttribution", + ) + + +class AttributionSourceId(proto.Message): + r"""Identifier for the source contributing to this attribution. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + grounding_passage (google.ai.generativelanguage_v1beta.types.AttributionSourceId.GroundingPassageId): + Identifier for an inline passage. + + This field is a member of `oneof`_ ``source``. + semantic_retriever_chunk (google.ai.generativelanguage_v1beta.types.AttributionSourceId.SemanticRetrieverChunk): + Identifier for a ``Chunk`` fetched via Semantic Retriever. + + This field is a member of `oneof`_ ``source``. + """ + + class GroundingPassageId(proto.Message): + r"""Identifier for a part within a ``GroundingPassage``. + + Attributes: + passage_id (str): + Output only. ID of the passage matching the + ``GenerateAnswerRequest``'s ``GroundingPassage.id``. + part_index (int): + Output only. Index of the part within the + ``GenerateAnswerRequest``'s ``GroundingPassage.content``. + """ + + passage_id: str = proto.Field( + proto.STRING, + number=1, + ) + part_index: int = proto.Field( + proto.INT32, + number=2, + ) + + class SemanticRetrieverChunk(proto.Message): + r"""Identifier for a ``Chunk`` retrieved via Semantic Retriever + specified in the ``GenerateAnswerRequest`` using + ``SemanticRetrieverConfig``. + + Attributes: + source (str): + Output only. Name of the source matching the request's + ``SemanticRetrieverConfig.source``. Example: ``corpora/123`` + or ``corpora/123/documents/abc`` + chunk (str): + Output only. Name of the ``Chunk`` containing the attributed + text. 
Example: ``corpora/123/documents/abc/chunks/xyz`` + """ + + source: str = proto.Field( + proto.STRING, + number=1, + ) + chunk: str = proto.Field( + proto.STRING, + number=2, + ) + + grounding_passage: GroundingPassageId = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message=GroundingPassageId, + ) + semantic_retriever_chunk: SemanticRetrieverChunk = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=SemanticRetrieverChunk, + ) + + +class GroundingAttribution(proto.Message): + r"""Attribution for a source that contributed to an answer. + + Attributes: + source_id (google.ai.generativelanguage_v1beta.types.AttributionSourceId): + Output only. Identifier for the source + contributing to this attribution. + content (google.ai.generativelanguage_v1beta.types.Content): + Grounding source content that makes up this + attribution. + """ + + source_id: "AttributionSourceId" = proto.Field( + proto.MESSAGE, + number=3, + message="AttributionSourceId", + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + + +class GenerateAnswerRequest(proto.Message): + r"""Request to generate a grounded answer from the model. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inline_passages (google.ai.generativelanguage_v1beta.types.GroundingPassages): + Passages provided inline with the request. + + This field is a member of `oneof`_ ``grounding_source``. + semantic_retriever (google.ai.generativelanguage_v1beta.types.SemanticRetrieverConfig): + Content retrieved from resources created via + the Semantic Retriever API. + + This field is a member of `oneof`_ ``grounding_source``. + model (str): + Required. 
The name of the ``Model`` to use for generating + the grounded response. + + Format: ``model=models/{model}``. + contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. The content of the current conversation with the + model. For single-turn queries, this is a single question to + answer. For multi-turn queries, this is a repeated field + that contains conversation history and the last ``Content`` + in the list containing the question. + + Note: GenerateAnswer currently only supports queries in + English. + answer_style (google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle): + Required. Style in which answers should be + returned. + safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances for + blocking unsafe content. + + This will be enforced on the + ``GenerateAnswerRequest.contents`` and + ``GenerateAnswerResponse.candidate``. There should not be + more than one setting for each ``SafetyCategory`` type. The + API will block any contents and responses that fail to meet + the thresholds set by these settings. This list overrides + the default settings for each ``SafetyCategory`` specified + in the safety_settings. If there is no ``SafetySetting`` for + a given ``SafetyCategory`` provided in the list, the API + will use the default safety setting for that category. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range from [0.0,1.0], inclusive. A value closer + to 1.0 will produce responses that are more varied and + creative, while a value closer to 0.0 will typically result + in more straightforward responses from the model. A low + temperature (~0.2) is usually recommended for + Attributed-Question-Answering use cases. + + This field is a member of `oneof`_ ``_temperature``. + """ + + class AnswerStyle(proto.Enum): + r"""Style for grounded answers. 
+ + Values: + ANSWER_STYLE_UNSPECIFIED (0): + Unspecified answer style. + ABSTRACTIVE (1): + Succint but abstract style. + EXTRACTIVE (2): + Very brief and extractive style. + VERBOSE (3): + Verbose style including extra details. The + response may be formatted as a sentence, + paragraph, multiple paragraphs, or bullet + points, etc. + """ + ANSWER_STYLE_UNSPECIFIED = 0 + ABSTRACTIVE = 1 + EXTRACTIVE = 2 + VERBOSE = 3 + + inline_passages: gag_content.GroundingPassages = proto.Field( + proto.MESSAGE, + number=6, + oneof="grounding_source", + message=gag_content.GroundingPassages, + ) + semantic_retriever: "SemanticRetrieverConfig" = proto.Field( + proto.MESSAGE, + number=7, + oneof="grounding_source", + message="SemanticRetrieverConfig", + ) + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + answer_style: AnswerStyle = proto.Field( + proto.ENUM, + number=5, + enum=AnswerStyle, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.SafetySetting, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=4, + optional=True, + ) + + +class GenerateAnswerResponse(proto.Message): + r"""Response from the model for a grounded answer. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + answer (google.ai.generativelanguage_v1beta.types.Candidate): + Candidate answer from the model. + + Note: The model *always* attempts to provide a grounded + answer, even when the answer is unlikely to be answerable + from the given passages. In that case, a low-quality or + ungrounded answer may be provided, along with a low + ``answerable_probability``. + answerable_probability (float): + Output only. 
The model's estimate of the probability that + its answer is correct and grounded in the input passages. + + A low answerable_probability indicates that the answer might + not be grounded in the sources. + + When ``answerable_probability`` is low, some clients may + wish to: + + - Display a message to the effect of "We couldn’t answer + that question" to the user. + - Fall back to a general-purpose LLM that answers the + question from world knowledge. The threshold and nature + of such fallbacks will depend on individual clients’ use + cases. 0.5 is a good starting threshold. + + This field is a member of `oneof`_ ``_answerable_probability``. + input_feedback (google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse.InputFeedback): + Output only. Feedback related to the input data used to + answer the question, as opposed to model-generated response + to the question. + + "Input data" can be one or more of the following: + + - Question specified by the last entry in + ``GenerateAnswerRequest.content`` + - Conversation history specified by the other entries in + ``GenerateAnswerRequest.content`` + - Grounding sources + (``GenerateAnswerRequest.semantic_retriever`` or + ``GenerateAnswerRequest.inline_passages``) + + This field is a member of `oneof`_ ``_input_feedback``. + """ + + class InputFeedback(proto.Message): + r"""Feedback related to the input data used to answer the + question, as opposed to model-generated response to the + question. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + block_reason (google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse.InputFeedback.BlockReason): + Optional. If set, the input was blocked and + no candidates are returned. Rephrase your input. + + This field is a member of `oneof`_ ``_block_reason``. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): + Ratings for safety of the input. 
+ There is at most one rating per category. + """ + + class BlockReason(proto.Enum): + r"""Specifies what was the reason why input was blocked. + + Values: + BLOCK_REASON_UNSPECIFIED (0): + Default value. This value is unused. + SAFETY (1): + Input was blocked due to safety reasons. You can inspect + ``safety_ratings`` to understand which safety category + blocked it. + OTHER (2): + Input was blocked due to other reasons. + """ + BLOCK_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + block_reason: "GenerateAnswerResponse.InputFeedback.BlockReason" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="GenerateAnswerResponse.InputFeedback.BlockReason", + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + + answer: "Candidate" = proto.Field( + proto.MESSAGE, + number=1, + message="Candidate", + ) + answerable_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + input_feedback: InputFeedback = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=InputFeedback, + ) + + +class EmbedContentRequest(proto.Message): + r"""Request containing the ``Content`` for the model to embed. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + content (google.ai.generativelanguage_v1beta.types.Content): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + task_type (google.ai.generativelanguage_v1beta.types.TaskType): + Optional. Optional task type for which the embeddings will + be used. Can only be set for ``models/embedding-001``. + + This field is a member of `oneof`_ ``_task_type``. + title (str): + Optional. 
An optional title for the text. Only applicable + when TaskType is ``RETRIEVAL_DOCUMENT``. + + Note: Specifying a ``title`` for ``RETRIEVAL_DOCUMENT`` + provides better quality embeddings for retrieval. + + This field is a member of `oneof`_ ``_title``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + task_type: "TaskType" = proto.Field( + proto.ENUM, + number=3, + optional=True, + enum="TaskType", + ) + title: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class ContentEmbedding(proto.Message): + r"""A list of floats representing an embedding. + + Attributes: + values (MutableSequence[float]): + The embedding values. + """ + + values: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=1, + ) + + +class EmbedContentResponse(proto.Message): + r"""The response to an ``EmbedContentRequest``. + + Attributes: + embedding (google.ai.generativelanguage_v1beta.types.ContentEmbedding): + Output only. The embedding generated from the + input content. + """ + + embedding: "ContentEmbedding" = proto.Field( + proto.MESSAGE, + number=1, + message="ContentEmbedding", + ) + + +class BatchEmbedContentsRequest(proto.Message): + r"""Batch request to get embeddings from the model for a list of + prompts. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + requests (MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedContentRequest]): + Required. Embed requests for the batch. The model in each of + these requests must match the model specified + ``BatchEmbedContentsRequest.model``. 
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["EmbedContentRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="EmbedContentRequest", + ) + + +class BatchEmbedContentsResponse(proto.Message): + r"""The response to a ``BatchEmbedContentsRequest``. + + Attributes: + embeddings (MutableSequence[google.ai.generativelanguage_v1beta.types.ContentEmbedding]): + Output only. The embeddings for each request, + in the same order as provided in the batch + request. + """ + + embeddings: MutableSequence["ContentEmbedding"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ContentEmbedding", + ) + + +class CountTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]): + Required. The input given to the model as a + prompt. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + + +class CountTokensResponse(proto.Message): + r"""A response from ``CountTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + total_tokens (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. 
+ """ + + total_tokens: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py new file mode 100644 index 000000000000..977380fb6418 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "Model", + }, +) + + +class Model(proto.Message): + r"""Information about a Generative Language Model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the ``Model``. + + Format: ``models/{model}`` with a ``{model}`` naming + convention of: + + - "{base_model_id}-{version}" + + Examples: + + - ``models/chat-bison-001`` + base_model_id (str): + Required. The name of the base model, pass this to the + generation request. + + Examples: + + - ``chat-bison`` + version (str): + Required. The version number of the model. 
+ + This represents the major version + display_name (str): + The human-readable name of the model. E.g. + "Chat Bison". + The name can be up to 128 characters long and + can consist of any UTF-8 characters. + description (str): + A short description of the model. + input_token_limit (int): + Maximum number of input tokens allowed for + this model. + output_token_limit (int): + Maximum number of output tokens available for + this model. + supported_generation_methods (MutableSequence[str]): + The model's supported generation methods. + + The method names are defined as Pascal case strings, such as + ``generateMessage`` which correspond to API methods. + temperature (float): + Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. This + value specifies default to be used by the backend while + making the call to the model. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + For Nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. This value specifies + default to be used by the backend while making the call to + the model. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + For Top-k sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. This value specifies default to be used by the + backend while making the call to the model. + + This field is a member of `oneof`_ ``_top_k``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + base_model_id: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + input_token_limit: int = proto.Field( + proto.INT32, + number=6, + ) + output_token_limit: int = proto.Field( + proto.INT32, + number=7, + ) + supported_generation_methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=9, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=10, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=11, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py new file mode 100644 index 000000000000..ed2043541989 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/model_service.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "GetTunedModelRequest", + "ListTunedModelsRequest", + "ListTunedModelsResponse", + "CreateTunedModelRequest", + "CreateTunedModelMetadata", + "UpdateTunedModelRequest", + "DeleteTunedModelRequest", + }, +) + + +class GetModelRequest(proto.Message): + r"""Request for getting information about a specific Model. + + Attributes: + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListModelsRequest(proto.Message): + r"""Request for listing all Models. + + Attributes: + page_size (int): + The maximum number of ``Models`` to return (per page). + + The service may return fewer models. If unspecified, at most + 50 models will be returned per page. This method returns at + most 1000 models per page, even if you pass a larger + page_size. + page_token (str): + A page token, received from a previous ``ListModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the page + token. 
+ """ + + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListModelsResponse(proto.Message): + r"""Response from ``ListModel`` containing a paginated list of Models. + + Attributes: + models (MutableSequence[google.ai.generativelanguage_v1beta.types.Model]): + The returned Models. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. + """ + + @property + def raw_page(self): + return self + + models: MutableSequence[model.Model] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=model.Model, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTunedModelRequest(proto.Message): + r"""Request for getting information about a specific Model. + + Attributes: + name (str): + Required. The resource name of the model. + + Format: ``tunedModels/my-model-id`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTunedModelsRequest(proto.Message): + r"""Request for listing TunedModels. + + Attributes: + page_size (int): + Optional. The maximum number of ``TunedModels`` to return + (per page). The service may return fewer tuned models. + + If unspecified, at most 10 tuned models will be returned. + This method returns at most 1000 models per page, even if + you pass a larger page_size. + page_token (str): + Optional. A page token, received from a previous + ``ListTunedModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListTunedModels`` must match the call that provided the + page token. + filter (str): + Optional. A filter is a full text search over + the tuned model's description and display name. + By default, results will not include tuned + models shared with everyone. 
+ + Additional operators: + + - owner:me + - writers:me + - readers:me + - readers:everyone + + Examples: + + "owner:me" returns all tuned models to which + caller has owner role "readers:me" returns all + tuned models to which caller has reader role + "readers:everyone" returns all tuned models that + are shared with everyone + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTunedModelsResponse(proto.Message): + r"""Response from ``ListTunedModels`` containing a paginated list of + Models. + + Attributes: + tuned_models (MutableSequence[google.ai.generativelanguage_v1beta.types.TunedModel]): + The returned Models. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. + """ + + @property + def raw_page(self): + return self + + tuned_models: MutableSequence[gag_tuned_model.TunedModel] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_tuned_model.TunedModel, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateTunedModelRequest(proto.Message): + r"""Request to create a TunedModel. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tuned_model_id (str): + Optional. The unique id for the tuned model if specified. + This value should be up to 40 characters, the first + character must be a letter, the last could be a letter or a + number. The id must match the regular expression: + `a-z <[a-z0-9-]{0,38}[a-z0-9]>`__?. + + This field is a member of `oneof`_ ``_tuned_model_id``. + tuned_model (google.ai.generativelanguage_v1beta.types.TunedModel): + Required. The tuned model to create. 
+ """ + + tuned_model_id: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tuned_model: gag_tuned_model.TunedModel = proto.Field( + proto.MESSAGE, + number=2, + message=gag_tuned_model.TunedModel, + ) + + +class CreateTunedModelMetadata(proto.Message): + r"""Metadata about the state and progress of creating a tuned + model returned from the long-running operation + + Attributes: + tuned_model (str): + Name of the tuned model associated with the + tuning operation. + total_steps (int): + The total number of tuning steps. + completed_steps (int): + The number of steps completed. + completed_percent (float): + The completed percentage for the tuning + operation. + snapshots (MutableSequence[google.ai.generativelanguage_v1beta.types.TuningSnapshot]): + Metrics collected during tuning. + """ + + tuned_model: str = proto.Field( + proto.STRING, + number=5, + ) + total_steps: int = proto.Field( + proto.INT32, + number=1, + ) + completed_steps: int = proto.Field( + proto.INT32, + number=2, + ) + completed_percent: float = proto.Field( + proto.FLOAT, + number=3, + ) + snapshots: MutableSequence[gag_tuned_model.TuningSnapshot] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=gag_tuned_model.TuningSnapshot, + ) + + +class UpdateTunedModelRequest(proto.Message): + r"""Request to update a TunedModel. + + Attributes: + tuned_model (google.ai.generativelanguage_v1beta.types.TunedModel): + Required. The tuned model to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + tuned_model: gag_tuned_model.TunedModel = proto.Field( + proto.MESSAGE, + number=1, + message=gag_tuned_model.TunedModel, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteTunedModelRequest(proto.Message): + r"""Request to delete a TunedModel. + + Attributes: + name (str): + Required. The resource name of the model. 
Format: + ``tunedModels/my-model-id`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/permission.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/permission.py new file mode 100644 index 000000000000..06f02b924f66 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/permission.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "Permission", + }, +) + + +class Permission(proto.Message): + r"""Permission resource grants user, group or the rest of the + world access to the PaLM API resource (e.g. a tuned model, + corpus). + + A role is a collection of permitted operations that allows users + to perform specific actions on PaLM API resources. To make them + available to users, groups, or service accounts, you assign + roles. When you assign a role, you grant permissions that the + role contains. + + There are three concentric roles. Each role is a superset of the + previous role's permitted operations: + + - reader can use the resource (e.g. 
tuned model, corpus) for + inference + - writer has reader's permissions and additionally can edit and + share + - owner has writer's permissions and additionally can delete + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The permission name. A unique name + will be generated on create. Examples: + tunedModels/{tuned_model}/permissions/{permission} + corpora/{corpus}/permissions/{permission} Output only. + grantee_type (google.ai.generativelanguage_v1beta.types.Permission.GranteeType): + Optional. Immutable. The type of the grantee. + + This field is a member of `oneof`_ ``_grantee_type``. + email_address (str): + Optional. Immutable. The email address of the + user of group which this permission refers. + Field is not set when permission's grantee type + is EVERYONE. + + This field is a member of `oneof`_ ``_email_address``. + role (google.ai.generativelanguage_v1beta.types.Permission.Role): + Required. The role granted by this + permission. + + This field is a member of `oneof`_ ``_role``. + """ + + class GranteeType(proto.Enum): + r"""Defines types of the grantee of this permission. + + Values: + GRANTEE_TYPE_UNSPECIFIED (0): + The default value. This value is unused. + USER (1): + Represents a user. When set, you must provide email_address + for the user. + GROUP (2): + Represents a group. When set, you must provide email_address + for the group. + EVERYONE (3): + Represents access to everyone. No extra + information is required. + """ + GRANTEE_TYPE_UNSPECIFIED = 0 + USER = 1 + GROUP = 2 + EVERYONE = 3 + + class Role(proto.Enum): + r"""Defines the role granted by this permission. + + Values: + ROLE_UNSPECIFIED (0): + The default value. This value is unused. + OWNER (1): + Owner can use, update, share and delete the + resource. + WRITER (2): + Writer can use, update and share the + resource. + READER (3): + Reader can use the resource. 
+ """ + ROLE_UNSPECIFIED = 0 + OWNER = 1 + WRITER = 2 + READER = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + grantee_type: GranteeType = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=GranteeType, + ) + email_address: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + role: Role = proto.Field( + proto.ENUM, + number=4, + optional=True, + enum=Role, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/permission_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/permission_service.py new file mode 100644 index 000000000000..51be3d944fd4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/permission_service.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import permission as gag_permission + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "CreatePermissionRequest", + "GetPermissionRequest", + "ListPermissionsRequest", + "ListPermissionsResponse", + "UpdatePermissionRequest", + "DeletePermissionRequest", + "TransferOwnershipRequest", + "TransferOwnershipResponse", + }, +) + + +class CreatePermissionRequest(proto.Message): + r"""Request to create a ``Permission``. + + Attributes: + parent (str): + Required. The parent resource of the ``Permission``. + Formats: ``tunedModels/{tuned_model}`` ``corpora/{corpus}`` + permission (google.ai.generativelanguage_v1beta.types.Permission): + Required. The permission to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + permission: gag_permission.Permission = proto.Field( + proto.MESSAGE, + number=2, + message=gag_permission.Permission, + ) + + +class GetPermissionRequest(proto.Message): + r"""Request for getting information about a specific ``Permission``. + + Attributes: + name (str): + Required. The resource name of the permission. + + Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPermissionsRequest(proto.Message): + r"""Request for listing permissions. + + Attributes: + parent (str): + Required. The parent resource of the permissions. Formats: + ``tunedModels/{tuned_model}`` ``corpora/{corpus}`` + page_size (int): + Optional. The maximum number of ``Permission``\ s to return + (per page). The service may return fewer permissions. + + If unspecified, at most 10 permissions will be returned. 
+ This method returns at most 1000 permissions per page, even + if you pass larger page_size. + page_token (str): + Optional. A page token, received from a previous + ``ListPermissions`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListPermissions`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListPermissionsResponse(proto.Message): + r"""Response from ``ListPermissions`` containing a paginated list of + permissions. + + Attributes: + permissions (MutableSequence[google.ai.generativelanguage_v1beta.types.Permission]): + Returned permissions. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. + """ + + @property + def raw_page(self): + return self + + permissions: MutableSequence[gag_permission.Permission] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_permission.Permission, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdatePermissionRequest(proto.Message): + r"""Request to update the ``Permission``. + + Attributes: + permission (google.ai.generativelanguage_v1beta.types.Permission): + Required. The permission to update. + + The permission's ``name`` field is used to identify the + permission to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
Accepted ones: + + - role (``Permission.role`` field) + """ + + permission: gag_permission.Permission = proto.Field( + proto.MESSAGE, + number=1, + message=gag_permission.Permission, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeletePermissionRequest(proto.Message): + r"""Request to delete the ``Permission``. + + Attributes: + name (str): + Required. The resource name of the permission. Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TransferOwnershipRequest(proto.Message): + r"""Request to transfer the ownership of the tuned model. + + Attributes: + name (str): + Required. The resource name of the tuned model to transfer + ownership. + + Format: ``tunedModels/my-model-id`` + email_address (str): + Required. The email address of the user to + whom the tuned model is being transferred to. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + email_address: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TransferOwnershipResponse(proto.Message): + r"""Response from ``TransferOwnership``.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py new file mode 100644 index 000000000000..31bec83f9e1e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "Corpus", + "Document", + "StringList", + "CustomMetadata", + "MetadataFilter", + "Condition", + "Chunk", + "ChunkData", + }, +) + + +class Corpus(proto.Message): + r"""A ``Corpus`` is a collection of ``Document``\ s. A project can + create up to 5 corpora. + + Attributes: + name (str): + Immutable. Identifier. The ``Corpus`` resource name. The ID + (name excluding the "corpora/" prefix) can contain up to 40 + characters that are lowercase alphanumeric or dashes (-). + The ID cannot start or end with a dash. If the name is empty + on create, a unique name will be derived from + ``display_name`` along with a 12 character random suffix. + Example: ``corpora/my-awesome-corpora-123a456b789c`` + display_name (str): + Optional. The human-readable display name for the + ``Corpus``. The display name must be no more than 512 + characters in length, including spaces. Example: "Docs on + Semantic Retriever". + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Corpus`` was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Corpus`` was last + updated. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class Document(proto.Message): + r"""A ``Document`` is a collection of ``Chunk``\ s. A ``Corpus`` can + have a maximum of 10,000 ``Document``\ s. + + Attributes: + name (str): + Immutable. Identifier. The ``Document`` resource name. The + ID (name excluding the `corpora/*/documents/` prefix) can + contain up to 40 characters that are lowercase alphanumeric + or dashes (-). The ID cannot start or end with a dash. If + the name is empty on create, a unique name will be derived + from ``display_name`` along with a 12 character random + suffix. Example: + ``corpora/{corpus_id}/documents/my-awesome-doc-123a456b789c`` + display_name (str): + Optional. The human-readable display name for the + ``Document``. The display name must be no more than 512 + characters in length, including spaces. Example: "Semantic + Retriever Documentation". + custom_metadata (MutableSequence[google.ai.generativelanguage_v1beta.types.CustomMetadata]): + Optional. User provided custom metadata stored as key-value + pairs used for querying. A ``Document`` can have a maximum + of 20 ``CustomMetadata``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Document`` was last + updated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Document`` was + created. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + custom_metadata: MutableSequence["CustomMetadata"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CustomMetadata", + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class StringList(proto.Message): + r"""User provided string values assigned to a single metadata + key. + + Attributes: + values (MutableSequence[str]): + The string values of the metadata to store. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class CustomMetadata(proto.Message): + r"""User provided metadata stored as key-value pairs. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_value (str): + The string value of the metadata to store. + + This field is a member of `oneof`_ ``value``. + string_list_value (google.ai.generativelanguage_v1beta.types.StringList): + The StringList value of the metadata to + store. + + This field is a member of `oneof`_ ``value``. + numeric_value (float): + The numeric value of the metadata to store. + + This field is a member of `oneof`_ ``value``. + key (str): + Required. The key of the metadata to store. 
+ """ + + string_value: str = proto.Field( + proto.STRING, + number=2, + oneof="value", + ) + string_list_value: "StringList" = proto.Field( + proto.MESSAGE, + number=6, + oneof="value", + message="StringList", + ) + numeric_value: float = proto.Field( + proto.FLOAT, + number=7, + oneof="value", + ) + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetadataFilter(proto.Message): + r"""User provided filter to limit retrieval based on ``Chunk`` or + ``Document`` level metadata values. Example (genre = drama OR genre + = action): key = "document.custom_metadata.genre" conditions = + [{string_value = "drama", operation = EQUAL}, {string_value = + "action", operation = EQUAL}] + + Attributes: + key (str): + Required. The key of the metadata to filter + on. + conditions (MutableSequence[google.ai.generativelanguage_v1beta.types.Condition]): + Required. The ``Condition``\ s for the given key that will + trigger this filter. Multiple ``Condition``\ s are joined by + logical ORs. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + conditions: MutableSequence["Condition"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Condition", + ) + + +class Condition(proto.Message): + r"""Filter condition applicable to a single key. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_value (str): + The string value to filter the metadata on. + + This field is a member of `oneof`_ ``value``. + numeric_value (float): + The numeric value to filter the metadata on. + + This field is a member of `oneof`_ ``value``. + operation (google.ai.generativelanguage_v1beta.types.Condition.Operator): + Required. 
Operator applied to the given + key-value pair to trigger the condition. + """ + + class Operator(proto.Enum): + r"""Defines the valid operators that can be applied to a + key-value pair. + + Values: + OPERATOR_UNSPECIFIED (0): + The default value. This value is unused. + LESS (1): + Supported by numeric. + LESS_EQUAL (2): + Supported by numeric. + EQUAL (3): + Supported by numeric & string. + GREATER_EQUAL (4): + Supported by numeric. + GREATER (5): + Supported by numeric. + NOT_EQUAL (6): + Supported by numeric & string. + INCLUDES (7): + Supported by string only when ``CustomMetadata`` value type + for the given key has a ``string_list_value``. + EXCLUDES (8): + Supported by string only when ``CustomMetadata`` value type + for the given key has a ``string_list_value``. + """ + OPERATOR_UNSPECIFIED = 0 + LESS = 1 + LESS_EQUAL = 2 + EQUAL = 3 + GREATER_EQUAL = 4 + GREATER = 5 + NOT_EQUAL = 6 + INCLUDES = 7 + EXCLUDES = 8 + + string_value: str = proto.Field( + proto.STRING, + number=1, + oneof="value", + ) + numeric_value: float = proto.Field( + proto.FLOAT, + number=6, + oneof="value", + ) + operation: Operator = proto.Field( + proto.ENUM, + number=5, + enum=Operator, + ) + + +class Chunk(proto.Message): + r"""A ``Chunk`` is a subpart of a ``Document`` that is treated as an + independent unit for the purposes of vector representation and + storage. A ``Corpus`` can have a maximum of 1 million ``Chunk``\ s. + + Attributes: + name (str): + Immutable. Identifier. The ``Chunk`` resource name. The ID + (name excluding the `corpora/*/documents/*/chunks/` prefix) + can contain up to 40 characters that are lowercase + alphanumeric or dashes (-). The ID cannot start or end with + a dash. If the name is empty on create, a random + 12-character unique ID will be generated. Example: + ``corpora/{corpus_id}/documents/{document_id}/chunks/123a456b789c`` + data (google.ai.generativelanguage_v1beta.types.ChunkData): + Required. 
The content for the ``Chunk``, such as the text + string. The maximum number of tokens per chunk is 2043. + custom_metadata (MutableSequence[google.ai.generativelanguage_v1beta.types.CustomMetadata]): + Optional. User provided custom metadata stored as key-value + pairs. The maximum number of ``CustomMetadata`` per chunk is + 20. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Chunk`` was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Chunk`` was last + updated. + state (google.ai.generativelanguage_v1beta.types.Chunk.State): + Output only. Current state of the ``Chunk``. + """ + + class State(proto.Enum): + r"""States for the lifecycle of a ``Chunk``. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + STATE_PENDING_PROCESSING (1): + ``Chunk`` is being processed (embedding and vector storage). + STATE_ACTIVE (2): + ``Chunk`` is processed and available for querying. + STATE_FAILED (10): + ``Chunk`` failed processing. + """ + STATE_UNSPECIFIED = 0 + STATE_PENDING_PROCESSING = 1 + STATE_ACTIVE = 2 + STATE_FAILED = 10 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data: "ChunkData" = proto.Field( + proto.MESSAGE, + number=2, + message="ChunkData", + ) + custom_metadata: MutableSequence["CustomMetadata"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CustomMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + + +class ChunkData(proto.Message): + r"""Extracted data that represents the ``Chunk`` content. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_value (str): + The ``Chunk`` content as a string. The maximum number of + tokens per chunk is 2043. + + This field is a member of `oneof`_ ``data``. + """ + + string_value: str = proto.Field( + proto.STRING, + number=1, + oneof="data", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever_service.py new file mode 100644 index 000000000000..f55804da9a59 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever_service.py @@ -0,0 +1,793 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import retriever + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "CreateCorpusRequest", + "GetCorpusRequest", + "UpdateCorpusRequest", + "DeleteCorpusRequest", + "ListCorporaRequest", + "ListCorporaResponse", + "QueryCorpusRequest", + "QueryCorpusResponse", + "RelevantChunk", + "CreateDocumentRequest", + "GetDocumentRequest", + "UpdateDocumentRequest", + "DeleteDocumentRequest", + "ListDocumentsRequest", + "ListDocumentsResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "CreateChunkRequest", + "BatchCreateChunksRequest", + "BatchCreateChunksResponse", + "GetChunkRequest", + "UpdateChunkRequest", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "DeleteChunkRequest", + "BatchDeleteChunksRequest", + "ListChunksRequest", + "ListChunksResponse", + }, +) + + +class CreateCorpusRequest(proto.Message): + r"""Request to create a ``Corpus``. + + Attributes: + corpus (google.ai.generativelanguage_v1beta.types.Corpus): + Required. The ``Corpus`` to create. + """ + + corpus: retriever.Corpus = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Corpus, + ) + + +class GetCorpusRequest(proto.Message): + r"""Request for getting information about a specific ``Corpus``. + + Attributes: + name (str): + Required. The name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCorpusRequest(proto.Message): + r"""Request to update a ``Corpus``. + + Attributes: + corpus (google.ai.generativelanguage_v1beta.types.Corpus): + Required. The ``Corpus`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
Currently, this only + supports updating ``display_name``. + """ + + corpus: retriever.Corpus = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Corpus, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteCorpusRequest(proto.Message): + r"""Request to delete a ``Corpus``. + + Attributes: + name (str): + Required. The resource name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + force (bool): + Optional. If set to true, any ``Document``\ s and objects + related to this ``Corpus`` will also be deleted. + + If false (the default), a ``FAILED_PRECONDITION`` error will + be returned if ``Corpus`` contains any ``Document``\ s. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListCorporaRequest(proto.Message): + r"""Request for listing ``Corpora``. + + Attributes: + page_size (int): + Optional. The maximum number of ``Corpora`` to return (per + page). The service may return fewer ``Corpora``. + + If unspecified, at most 10 ``Corpora`` will be returned. The + maximum size limit is 20 ``Corpora`` per page. + page_token (str): + Optional. A page token, received from a previous + ``ListCorpora`` call. + + Provide the ``next_page_token`` returned in the response as + an argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListCorpora`` must match the call that provided the page + token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListCorporaResponse(proto.Message): + r"""Response from ``ListCorpora`` containing a paginated list of + ``Corpora``. The results are sorted by ascending + ``corpus.create_time``. 
+ + Attributes: + corpora (MutableSequence[google.ai.generativelanguage_v1beta.types.Corpus]): + The returned corpora. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no more + pages. + """ + + @property + def raw_page(self): + return self + + corpora: MutableSequence[retriever.Corpus] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Corpus, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class QueryCorpusRequest(proto.Message): + r"""Request for querying a ``Corpus``. + + Attributes: + name (str): + Required. The name of the ``Corpus`` to query. Example: + ``corpora/my-corpus-123`` + query (str): + Required. Query string to perform semantic + search. + metadata_filters (MutableSequence[google.ai.generativelanguage_v1beta.types.MetadataFilter]): + Optional. Filter for ``Chunk`` and ``Document`` metadata. + Each ``MetadataFilter`` object should correspond to a unique + key. Multiple ``MetadataFilter`` objects are joined by + logical "AND"s. 
+ + Example query at document level: (year >= 2020 OR year < + 2010) AND (genre = drama OR genre = action) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "document.custom_metadata.year" conditions = [{int_value = + 2020, operation = GREATER_EQUAL}, {int_value = 2010, + operation = LESS}]}, {key = "document.custom_metadata.year" + conditions = [{int_value = 2020, operation = GREATER_EQUAL}, + {int_value = 2010, operation = LESS}]}, {key = + "document.custom_metadata.genre" conditions = [{string_value + = "drama", operation = EQUAL}, {string_value = "action", + operation = EQUAL}]}] + + Example query at chunk level for a numeric range of values: + (year > 2015 AND year <= 2020) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2015, operation = GREATER}]}, {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2020, operation = LESS_EQUAL}]}] + + Note: "AND"s for the same key are only supported for numeric + values. String values only support "OR"s for the same key. + results_count (int): + Optional. The maximum number of ``Chunk``\ s to return. The + service may return fewer ``Chunk``\ s. + + If unspecified, at most 10 ``Chunk``\ s will be returned. + The maximum specified result count is 100. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + metadata_filters: MutableSequence[retriever.MetadataFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=retriever.MetadataFilter, + ) + results_count: int = proto.Field( + proto.INT32, + number=4, + ) + + +class QueryCorpusResponse(proto.Message): + r"""Response from ``QueryCorpus`` containing a list of relevant chunks. + + Attributes: + relevant_chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.RelevantChunk]): + The relevant chunks. 
+ """ + + relevant_chunks: MutableSequence["RelevantChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RelevantChunk", + ) + + +class RelevantChunk(proto.Message): + r"""The information for a chunk relevant to a query. + + Attributes: + chunk_relevance_score (float): + ``Chunk`` relevance to the query. + chunk (google.ai.generativelanguage_v1beta.types.Chunk): + ``Chunk`` associated with the query. + """ + + chunk_relevance_score: float = proto.Field( + proto.FLOAT, + number=1, + ) + chunk: retriever.Chunk = proto.Field( + proto.MESSAGE, + number=2, + message=retriever.Chunk, + ) + + +class CreateDocumentRequest(proto.Message): + r"""Request to create a ``Document``. + + Attributes: + parent (str): + Required. The name of the ``Corpus`` where this ``Document`` + will be created. Example: ``corpora/my-corpus-123`` + document (google.ai.generativelanguage_v1beta.types.Document): + Required. The ``Document`` to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + document: retriever.Document = proto.Field( + proto.MESSAGE, + number=2, + message=retriever.Document, + ) + + +class GetDocumentRequest(proto.Message): + r"""Request for getting information about a specific ``Document``. + + Attributes: + name (str): + Required. The name of the ``Document`` to retrieve. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDocumentRequest(proto.Message): + r"""Request to update a ``Document``. + + Attributes: + document (google.ai.generativelanguage_v1beta.types.Document): + Required. The ``Document`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this only + supports updating ``display_name`` and ``custom_metadata``. 
+ """ + + document: retriever.Document = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Document, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteDocumentRequest(proto.Message): + r"""Request to delete a ``Document``. + + Attributes: + name (str): + Required. The resource name of the ``Document`` to delete. + Example: ``corpora/my-corpus-123/documents/the-doc-abc`` + force (bool): + Optional. If set to true, any ``Chunk``\ s and objects + related to this ``Document`` will also be deleted. + + If false (the default), a ``FAILED_PRECONDITION`` error will + be returned if ``Document`` contains any ``Chunk``\ s. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListDocumentsRequest(proto.Message): + r"""Request for listing ``Document``\ s. + + Attributes: + parent (str): + Required. The name of the ``Corpus`` containing + ``Document``\ s. Example: ``corpora/my-corpus-123`` + page_size (int): + Optional. The maximum number of ``Document``\ s to return + (per page). The service may return fewer ``Document``\ s. + + If unspecified, at most 10 ``Document``\ s will be returned. + The maximum size limit is 20 ``Document``\ s per page. + page_token (str): + Optional. A page token, received from a previous + ``ListDocuments`` call. + + Provide the ``next_page_token`` returned in the response as + an argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListDocuments`` must match the call that provided the page + token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDocumentsResponse(proto.Message): + r"""Response from ``ListDocuments`` containing a paginated list of + ``Document``\ s. The ``Document``\ s are sorted by ascending + ``document.create_time``. + + Attributes: + documents (MutableSequence[google.ai.generativelanguage_v1beta.types.Document]): + The returned ``Document``\ s. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no more + pages. + """ + + @property + def raw_page(self): + return self + + documents: MutableSequence[retriever.Document] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Document, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class QueryDocumentRequest(proto.Message): + r"""Request for querying a ``Document``. + + Attributes: + name (str): + Required. The name of the ``Document`` to query. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + query (str): + Required. Query string to perform semantic + search. + results_count (int): + Optional. The maximum number of ``Chunk``\ s to return. The + service may return fewer ``Chunk``\ s. + + If unspecified, at most 10 ``Chunk``\ s will be returned. + The maximum specified result count is 100. + metadata_filters (MutableSequence[google.ai.generativelanguage_v1beta.types.MetadataFilter]): + Optional. Filter for ``Chunk`` metadata. Each + ``MetadataFilter`` object should correspond to a unique key. + Multiple ``MetadataFilter`` objects are joined by logical + "AND"s. + + Note: ``Document``-level filtering is not supported for this + request because a ``Document`` name is already specified. 
+ + Example query: (year >= 2020 OR year < 2010) AND (genre = + drama OR genre = action) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2020, operation = GREATER_EQUAL}, {int_value = 2010, + operation = LESS}]}, {key = "chunk.custom_metadata.genre" + conditions = [{string_value = "drama", operation = EQUAL}, + {string_value = "action", operation = EQUAL}]}] + + Example query for a numeric range of values: (year > 2015 + AND year <= 2020) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2015, operation = GREATER}]}, {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2020, operation = LESS_EQUAL}]}] + + Note: "AND"s for the same key are only supported for numeric + values. String values only support "OR"s for the same key. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + results_count: int = proto.Field( + proto.INT32, + number=3, + ) + metadata_filters: MutableSequence[retriever.MetadataFilter] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=retriever.MetadataFilter, + ) + + +class QueryDocumentResponse(proto.Message): + r"""Response from ``QueryDocument`` containing a list of relevant + chunks. + + Attributes: + relevant_chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.RelevantChunk]): + The returned relevant chunks. + """ + + relevant_chunks: MutableSequence["RelevantChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RelevantChunk", + ) + + +class CreateChunkRequest(proto.Message): + r"""Request to create a ``Chunk``. + + Attributes: + parent (str): + Required. The name of the ``Document`` where this ``Chunk`` + will be created. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + chunk (google.ai.generativelanguage_v1beta.types.Chunk): + Required. 
The ``Chunk`` to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + chunk: retriever.Chunk = proto.Field( + proto.MESSAGE, + number=2, + message=retriever.Chunk, + ) + + +class BatchCreateChunksRequest(proto.Message): + r"""Request to batch create ``Chunk``\ s. + + Attributes: + parent (str): + Optional. The name of the ``Document`` where this batch of + ``Chunk``\ s will be created. The parent field in every + ``CreateChunkRequest`` must match this value. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + requests (MutableSequence[google.ai.generativelanguage_v1beta.types.CreateChunkRequest]): + Required. The request messages specifying the ``Chunk``\ s + to create. A maximum of 100 ``Chunk``\ s can be created in a + batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateChunkRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateChunkRequest", + ) + + +class BatchCreateChunksResponse(proto.Message): + r"""Response from ``BatchCreateChunks`` containing a list of created + ``Chunk``\ s. + + Attributes: + chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.Chunk]): + ``Chunk``\ s created. + """ + + chunks: MutableSequence[retriever.Chunk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + + +class GetChunkRequest(proto.Message): + r"""Request for getting information about a specific ``Chunk``. + + Attributes: + name (str): + Required. The name of the ``Chunk`` to retrieve. Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateChunkRequest(proto.Message): + r"""Request to update a ``Chunk``. + + Attributes: + chunk (google.ai.generativelanguage_v1beta.types.Chunk): + Required. The ``Chunk`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
Currently, this only + supports updating ``custom_metadata`` and ``data``. + """ + + chunk: retriever.Chunk = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class BatchUpdateChunksRequest(proto.Message): + r"""Request to batch update ``Chunk``\ s. + + Attributes: + parent (str): + Optional. The name of the ``Document`` containing the + ``Chunk``\ s to update. The parent field in every + ``UpdateChunkRequest`` must match this value. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + requests (MutableSequence[google.ai.generativelanguage_v1beta.types.UpdateChunkRequest]): + Required. The request messages specifying the ``Chunk``\ s + to update. A maximum of 100 ``Chunk``\ s can be updated in a + batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["UpdateChunkRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="UpdateChunkRequest", + ) + + +class BatchUpdateChunksResponse(proto.Message): + r"""Response from ``BatchUpdateChunks`` containing a list of updated + ``Chunk``\ s. + + Attributes: + chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.Chunk]): + ``Chunk``\ s updated. + """ + + chunks: MutableSequence[retriever.Chunk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + + +class DeleteChunkRequest(proto.Message): + r"""Request to delete a ``Chunk``. + + Attributes: + name (str): + Required. The resource name of the ``Chunk`` to delete. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BatchDeleteChunksRequest(proto.Message): + r"""Request to batch delete ``Chunk``\ s. + + Attributes: + parent (str): + Optional. The name of the ``Document`` containing the + ``Chunk``\ s to delete. 
The parent field in every + ``DeleteChunkRequest`` must match this value. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + requests (MutableSequence[google.ai.generativelanguage_v1beta.types.DeleteChunkRequest]): + Required. The request messages specifying the ``Chunk``\ s + to delete. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["DeleteChunkRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="DeleteChunkRequest", + ) + + +class ListChunksRequest(proto.Message): + r"""Request for listing ``Chunk``\ s. + + Attributes: + parent (str): + Required. The name of the ``Document`` containing + ``Chunk``\ s. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + page_size (int): + Optional. The maximum number of ``Chunk``\ s to return (per + page). The service may return fewer ``Chunk``\ s. + + If unspecified, at most 10 ``Chunk``\ s will be returned. + The maximum size limit is 100 ``Chunk``\ s per page. + page_token (str): + Optional. A page token, received from a previous + ``ListChunks`` call. + + Provide the ``next_page_token`` returned in the response as + an argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListChunks`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListChunksResponse(proto.Message): + r"""Response from ``ListChunks`` containing a paginated list of + ``Chunk``\ s. The ``Chunk``\ s are sorted by ascending + ``chunk.create_time``. + + Attributes: + chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.Chunk]): + The returned ``Chunk``\ s. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. 
If this field is omitted, there are no more + pages. + """ + + @property + def raw_page(self): + return self + + chunks: MutableSequence[retriever.Chunk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py new file mode 100644 index 000000000000..30ce00d36f97 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "HarmCategory", + "ContentFilter", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + }, +) + + +class HarmCategory(proto.Enum): + r"""The category of a rating. + + These categories cover various kinds of harms that developers + may wish to adjust. + + Values: + HARM_CATEGORY_UNSPECIFIED (0): + Category is unspecified. 
+ HARM_CATEGORY_DEROGATORY (1): + Negative or harmful comments targeting + identity and/or protected attribute. + HARM_CATEGORY_TOXICITY (2): + Content that is rude, disrespectful, or + profane. + HARM_CATEGORY_VIOLENCE (3): + Describes scenarios depicting violence against + an individual or group, or general descriptions + of gore. + HARM_CATEGORY_SEXUAL (4): + Contains references to sexual acts or other + lewd content. + HARM_CATEGORY_MEDICAL (5): + Promotes unchecked medical advice. + HARM_CATEGORY_DANGEROUS (6): + Dangerous content that promotes, facilitates, + or encourages harmful acts. + HARM_CATEGORY_HARASSMENT (7): + Harassment content. + HARM_CATEGORY_HATE_SPEECH (8): + Hate speech and content. + HARM_CATEGORY_SEXUALLY_EXPLICIT (9): + Sexually explicit content. + HARM_CATEGORY_DANGEROUS_CONTENT (10): + Dangerous content. + """ + HARM_CATEGORY_UNSPECIFIED = 0 + HARM_CATEGORY_DEROGATORY = 1 + HARM_CATEGORY_TOXICITY = 2 + HARM_CATEGORY_VIOLENCE = 3 + HARM_CATEGORY_SEXUAL = 4 + HARM_CATEGORY_MEDICAL = 5 + HARM_CATEGORY_DANGEROUS = 6 + HARM_CATEGORY_HARASSMENT = 7 + HARM_CATEGORY_HATE_SPEECH = 8 + HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 + HARM_CATEGORY_DANGEROUS_CONTENT = 10 + + +class ContentFilter(proto.Message): + r"""Content filtering metadata associated with processing a + single request. + ContentFilter contains a reason and an optional supporting + string. The reason may be unspecified. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reason (google.ai.generativelanguage_v1beta.types.ContentFilter.BlockedReason): + The reason content was blocked during request + processing. + message (str): + A string that describes the filtering + behavior in more detail. + + This field is a member of `oneof`_ ``_message``. + """ + + class BlockedReason(proto.Enum): + r"""A list of reasons why content may have been blocked. 
+ + Values: + BLOCKED_REASON_UNSPECIFIED (0): + A blocked reason was not specified. + SAFETY (1): + Content was blocked by safety settings. + OTHER (2): + Content was blocked, but the reason is + uncategorized. + """ + BLOCKED_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + reason: BlockedReason = proto.Field( + proto.ENUM, + number=1, + enum=BlockedReason, + ) + message: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class SafetyFeedback(proto.Message): + r"""Safety feedback for an entire request. + + This field is populated if content in the input and/or response + is blocked due to safety settings. SafetyFeedback may not exist + for every HarmCategory. Each SafetyFeedback will return the + safety settings used by the request as well as the lowest + HarmProbability that should be allowed in order to return a + result. + + Attributes: + rating (google.ai.generativelanguage_v1beta.types.SafetyRating): + Safety rating evaluated from content. + setting (google.ai.generativelanguage_v1beta.types.SafetySetting): + Safety settings applied to the request. + """ + + rating: "SafetyRating" = proto.Field( + proto.MESSAGE, + number=1, + message="SafetyRating", + ) + setting: "SafetySetting" = proto.Field( + proto.MESSAGE, + number=2, + message="SafetySetting", + ) + + +class SafetyRating(proto.Message): + r"""Safety rating for a piece of content. + + The safety rating contains the category of harm and the harm + probability level in that category for a piece of content. + Content is classified for safety across a number of harm + categories and the probability of the harm classification is + included here. + + Attributes: + category (google.ai.generativelanguage_v1beta.types.HarmCategory): + Required. The category for this rating. + probability (google.ai.generativelanguage_v1beta.types.SafetyRating.HarmProbability): + Required. The probability of harm for this + content. + blocked (bool): + Was this content blocked because of this + rating? 
+ """ + + class HarmProbability(proto.Enum): + r"""The probability that a piece of content is harmful. + + The classification system gives the probability of the content + being unsafe. This does not indicate the severity of harm for a + piece of content. + + Values: + HARM_PROBABILITY_UNSPECIFIED (0): + Probability is unspecified. + NEGLIGIBLE (1): + Content has a negligible chance of being + unsafe. + LOW (2): + Content has a low chance of being unsafe. + MEDIUM (3): + Content has a medium chance of being unsafe. + HIGH (4): + Content has a high chance of being unsafe. + """ + HARM_PROBABILITY_UNSPECIFIED = 0 + NEGLIGIBLE = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + probability: HarmProbability = proto.Field( + proto.ENUM, + number=4, + enum=HarmProbability, + ) + blocked: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class SafetySetting(proto.Message): + r"""Safety setting, affecting the safety-blocking behavior. + + Passing a safety setting for a category changes the allowed + probability that content is blocked. + + Attributes: + category (google.ai.generativelanguage_v1beta.types.HarmCategory): + Required. The category for this setting. + threshold (google.ai.generativelanguage_v1beta.types.SafetySetting.HarmBlockThreshold): + Required. Controls the probability threshold + at which harm is blocked. + """ + + class HarmBlockThreshold(proto.Enum): + r"""Block at and beyond a specified harm probability. + + Values: + HARM_BLOCK_THRESHOLD_UNSPECIFIED (0): + Threshold is unspecified. + BLOCK_LOW_AND_ABOVE (1): + Content with NEGLIGIBLE will be allowed. + BLOCK_MEDIUM_AND_ABOVE (2): + Content with NEGLIGIBLE and LOW will be + allowed. + BLOCK_ONLY_HIGH (3): + Content with NEGLIGIBLE, LOW, and MEDIUM will + be allowed. + BLOCK_NONE (4): + All content will be allowed. 
+ """ + HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 + BLOCK_LOW_AND_ABOVE = 1 + BLOCK_MEDIUM_AND_ABOVE = 2 + BLOCK_ONLY_HIGH = 3 + BLOCK_NONE = 4 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + threshold: HarmBlockThreshold = proto.Field( + proto.ENUM, + number=4, + enum=HarmBlockThreshold, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/text_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/text_service.py new file mode 100644 index 000000000000..0b0ba3e53191 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/text_service.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1beta.types import citation, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "GenerateTextRequest", + "GenerateTextResponse", + "TextPrompt", + "TextCompletion", + "EmbedTextRequest", + "EmbedTextResponse", + "BatchEmbedTextRequest", + "BatchEmbedTextResponse", + "Embedding", + "CountTextTokensRequest", + "CountTextTokensResponse", + }, +) + + +class GenerateTextRequest(proto.Message): + r"""Request to generate a text completion response from the + model. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the ``Model`` or ``TunedModel`` to use + for generating the completion. Examples: + models/text-bison-001 tunedModels/sentence-translator-u3b7m + prompt (google.ai.generativelanguage_v1beta.types.TextPrompt): + Required. The free-form input text given to + the model as a prompt. + Given a prompt, the model will generate a + TextCompletion response it predicts as the + completion of the input text. + temperature (float): + Optional. Controls the randomness of the output. Note: The + default value varies by model, see the ``Model.temperature`` + attribute of the ``Model`` returned the ``getModel`` + function. + + Values can range from [0.0,1.0], inclusive. A value closer + to 1.0 will produce responses that are more varied and + creative, while a value closer to 0.0 will typically result + in more straightforward responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + candidate_count (int): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, this + will default to 1. + + This field is a member of `oneof`_ ``_candidate_count``. 
+ max_output_tokens (int): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit specified + in the ``Model`` specification. + + This field is a member of `oneof`_ ``_max_output_tokens``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities so + that only the most likely tokens are considered. Top-k + sampling directly limits the maximum number of tokens to + consider, while Nucleus sampling limits number of tokens + based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_k``. + safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances for + blocking unsafe content. + + that will be enforced on the ``GenerateTextRequest.prompt`` + and ``GenerateTextResponse.candidates``. There should not be + more than one setting for each ``SafetyCategory`` type. The + API will block any prompts and responses that fail to meet + the thresholds set by these settings. This list overrides + the default settings for each ``SafetyCategory`` specified + in the safety_settings. 
If there is no ``SafetySetting`` for + a given ``SafetyCategory`` provided in the list, the API + will use the default safety setting for that category. Harm + categories HARM_CATEGORY_DEROGATORY, HARM_CATEGORY_TOXICITY, + HARM_CATEGORY_VIOLENCE, HARM_CATEGORY_SEXUAL, + HARM_CATEGORY_MEDICAL, HARM_CATEGORY_DANGEROUS are supported + in text service. + stop_sequences (MutableSequence[str]): + The set of character sequences (up to 5) that + will stop output generation. If specified, the + API will stop at the first appearance of a stop + sequence. The stop sequence will not be included + as part of the response. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "TextPrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="TextPrompt", + ) + temperature: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + candidate_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + max_output_tokens: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=safety.SafetySetting, + ) + stop_sequences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + + +class GenerateTextResponse(proto.Message): + r"""The response from the model, including candidate completions. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.TextCompletion]): + Candidate responses from the model. + filters (MutableSequence[google.ai.generativelanguage_v1beta.types.ContentFilter]): + A set of content filtering metadata for the prompt and + response text. 
+ + This indicates which ``SafetyCategory``\ (s) blocked a + candidate from this response, the lowest ``HarmProbability`` + that triggered a block, and the HarmThreshold setting for + that category. This indicates the smallest change to the + ``SafetySettings`` that would be necessary to unblock at + least 1 response. + + The blocking is configured by the ``SafetySettings`` in the + request (or the default ``SafetySettings`` of the API). + safety_feedback (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyFeedback]): + Returns any safety feedback related to + content filtering. + """ + + candidates: MutableSequence["TextCompletion"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TextCompletion", + ) + filters: MutableSequence[safety.ContentFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.ContentFilter, + ) + safety_feedback: MutableSequence[safety.SafetyFeedback] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=safety.SafetyFeedback, + ) + + +class TextPrompt(proto.Message): + r"""Text given to the model as a prompt. + + The Model will use this TextPrompt to Generate a text + completion. + + Attributes: + text (str): + Required. The prompt text. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TextCompletion(proto.Message): + r"""Output text returned from a model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output (str): + Output only. The generated text returned from + the model. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetyRating]): + Ratings for the safety of a response. + + There is at most one rating per category. + citation_metadata (google.ai.generativelanguage_v1beta.types.CitationMetadata): + Output only. Citation information for model-generated + ``output`` in this ``TextCompletion``. 
+ + This field may be populated with attribution information for + any text included in the ``output``. + + This field is a member of `oneof`_ ``_citation_metadata``. + """ + + output: str = proto.Field( + proto.STRING, + number=1, + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=citation.CitationMetadata, + ) + + +class EmbedTextRequest(proto.Message): + r"""Request to get a text embedding from the model. + + Attributes: + model (str): + Required. The model name to use with the + format model=models/{model}. + text (str): + Optional. The free-form input text that the + model will turn into an embedding. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + text: str = proto.Field( + proto.STRING, + number=2, + ) + + +class EmbedTextResponse(proto.Message): + r"""The response to a EmbedTextRequest. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + embedding (google.ai.generativelanguage_v1beta.types.Embedding): + Output only. The embedding generated from the + input text. + + This field is a member of `oneof`_ ``_embedding``. + """ + + embedding: "Embedding" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Embedding", + ) + + +class BatchEmbedTextRequest(proto.Message): + r"""Batch request to get a text embedding from the model. + + Attributes: + model (str): + Required. The name of the ``Model`` to use for generating + the embedding. Examples: models/embedding-gecko-001 + texts (MutableSequence[str]): + Optional. The free-form input texts that the + model will turn into an embedding. The current + limit is 100 texts, over which an error will be + thrown. 
+ requests (MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedTextRequest]): + Optional. Embed requests for the batch. Only one of + ``texts`` or ``requests`` can be set. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + texts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + requests: MutableSequence["EmbedTextRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="EmbedTextRequest", + ) + + +class BatchEmbedTextResponse(proto.Message): + r"""The response to a EmbedTextRequest. + + Attributes: + embeddings (MutableSequence[google.ai.generativelanguage_v1beta.types.Embedding]): + Output only. The embeddings generated from + the input text. + """ + + embeddings: MutableSequence["Embedding"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Embedding", + ) + + +class Embedding(proto.Message): + r"""A list of floats representing the embedding. + + Attributes: + value (MutableSequence[float]): + The embedding values. + """ + + value: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=1, + ) + + +class CountTextTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + prompt (google.ai.generativelanguage_v1beta.types.TextPrompt): + Required. The free-form input text given to + the model as a prompt. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "TextPrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="TextPrompt", + ) + + +class CountTextTokensResponse(proto.Message): + r"""A response from ``CountTextTokens``. 
+ + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + token_count (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. + """ + + token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py new file mode 100644 index 000000000000..79c7889f5173 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "TunedModel", + "TunedModelSource", + "TuningTask", + "Hyperparameters", + "Dataset", + "TuningExamples", + "TuningExample", + "TuningSnapshot", + }, +) + + +class TunedModel(proto.Message): + r"""A fine-tuned model created using + ModelService.CreateTunedModel. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tuned_model_source (google.ai.generativelanguage_v1beta.types.TunedModelSource): + Optional. TunedModel to use as the starting + point for training the new model. + + This field is a member of `oneof`_ ``source_model``. + base_model (str): + Immutable. The name of the ``Model`` to tune. Example: + ``models/text-bison-001`` + + This field is a member of `oneof`_ ``source_model``. + name (str): + Output only. The tuned model name. A unique name will be + generated on create. Example: ``tunedModels/az2mb0bpw6i`` If + display_name is set on create, the id portion of the name + will be set by concatenating the words of the display_name + with hyphens and adding a random portion for uniqueness. + Example: display_name = "Sentence Translator" name = + "tunedModels/sentence-translator-u3b7m". + display_name (str): + Optional. The name to display for this model + in user interfaces. The display name must be up + to 40 characters including spaces. + description (str): + Optional. A short description of this model. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This value specifies default to be the one used by the base + model while creating the model. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + Optional. For Nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. 
+ + This value specifies default to be the one used by the base + model while creating the model. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. For Top-k sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. This value specifies default to be used by the + backend while making the call to the model. + + This value specifies default to be the one used by the base + model while creating the model. + + This field is a member of `oneof`_ ``_top_k``. + state (google.ai.generativelanguage_v1beta.types.TunedModel.State): + Output only. The state of the tuned model. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this model + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this model + was updated. + tuning_task (google.ai.generativelanguage_v1beta.types.TuningTask): + Required. The tuning task that creates the + tuned model. + """ + + class State(proto.Enum): + r"""The state of the tuned model. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is unused. + CREATING (1): + The model is being created. + ACTIVE (2): + The model is ready to be used. + FAILED (3): + The model failed to be created. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + FAILED = 3 + + tuned_model_source: "TunedModelSource" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source_model", + message="TunedModelSource", + ) + base_model: str = proto.Field( + proto.STRING, + number=4, + oneof="source_model", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=11, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=12, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=13, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + tuning_task: "TuningTask" = proto.Field( + proto.MESSAGE, + number=10, + message="TuningTask", + ) + + +class TunedModelSource(proto.Message): + r"""Tuned model as a source for training a new model. + + Attributes: + tuned_model (str): + Immutable. The name of the ``TunedModel`` to use as the + starting point for training the new model. Example: + ``tunedModels/my-tuned-model`` + base_model (str): + Output only. The name of the base ``Model`` this + ``TunedModel`` was tuned from. Example: + ``models/text-bison-001`` + """ + + tuned_model: str = proto.Field( + proto.STRING, + number=1, + ) + base_model: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TuningTask(proto.Message): + r"""Tuning tasks that create tuned models. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when tuning this + model started. 
+ complete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when tuning this + model completed. + snapshots (MutableSequence[google.ai.generativelanguage_v1beta.types.TuningSnapshot]): + Output only. Metrics collected during tuning. + training_data (google.ai.generativelanguage_v1beta.types.Dataset): + Required. Input only. Immutable. The model + training data. + hyperparameters (google.ai.generativelanguage_v1beta.types.Hyperparameters): + Immutable. Hyperparameters controlling the + tuning process. If not provided, default values + will be used. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + snapshots: MutableSequence["TuningSnapshot"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TuningSnapshot", + ) + training_data: "Dataset" = proto.Field( + proto.MESSAGE, + number=4, + message="Dataset", + ) + hyperparameters: "Hyperparameters" = proto.Field( + proto.MESSAGE, + number=5, + message="Hyperparameters", + ) + + +class Hyperparameters(proto.Message): + r"""Hyperparameters controlling the tuning process. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + epoch_count (int): + Immutable. The number of training epochs. An + epoch is one pass through the training data. If + not set, a default of 10 will be used. + + This field is a member of `oneof`_ ``_epoch_count``. + batch_size (int): + Immutable. The batch size hyperparameter for + tuning. If not set, a default of 16 or 64 will + be used based on the number of training + examples. + + This field is a member of `oneof`_ ``_batch_size``. + learning_rate (float): + Immutable. The learning rate hyperparameter + for tuning. 
If not set, a default of 0.0002 or + 0.002 will be calculated based on the number of + training examples. + + This field is a member of `oneof`_ ``_learning_rate``. + """ + + epoch_count: int = proto.Field( + proto.INT32, + number=14, + optional=True, + ) + batch_size: int = proto.Field( + proto.INT32, + number=15, + optional=True, + ) + learning_rate: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + + +class Dataset(proto.Message): + r"""Dataset for training or validation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + examples (google.ai.generativelanguage_v1beta.types.TuningExamples): + Optional. Inline examples. + + This field is a member of `oneof`_ ``dataset``. + """ + + examples: "TuningExamples" = proto.Field( + proto.MESSAGE, + number=1, + oneof="dataset", + message="TuningExamples", + ) + + +class TuningExamples(proto.Message): + r"""A set of tuning examples. Can be training or validation data. + + Attributes: + examples (MutableSequence[google.ai.generativelanguage_v1beta.types.TuningExample]): + Required. The examples. Example input can be + for text or discuss, but all examples in a set + must be of the same type. + """ + + examples: MutableSequence["TuningExample"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TuningExample", + ) + + +class TuningExample(proto.Message): + r"""A single example for tuning. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text_input (str): + Optional. Text model input. + + This field is a member of `oneof`_ ``model_input``. + output (str): + Required. The expected model output. + """ + + text_input: str = proto.Field( + proto.STRING, + number=1, + oneof="model_input", + ) + output: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TuningSnapshot(proto.Message): + r"""Record for a single tuning step. 
+ + Attributes: + step (int): + Output only. The tuning step. + epoch (int): + Output only. The epoch this step was part of. + mean_loss (float): + Output only. The mean loss of the training + examples for this step. + compute_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this metric + was computed. + """ + + step: int = proto.Field( + proto.INT32, + number=1, + ) + epoch: int = proto.Field( + proto.INT32, + number=2, + ) + mean_loss: float = proto.Field( + proto.FLOAT, + number=3, + ) + compute_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 288d10b11145..3754937a30b0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 288d10b11145..3754937a30b0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.5.0" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/noxfile.py b/packages/google-ai-generativelanguage/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-ai-generativelanguage/noxfile.py +++ b/packages/google-ai-generativelanguage/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_batch_embed_contents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_batch_embed_contents_async.py new file mode 100644 index 000000000000..888e16ab2eab --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_batch_embed_contents_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_BatchEmbedContents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = await client.batch_embed_contents(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_BatchEmbedContents_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_batch_embed_contents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_batch_embed_contents_sync.py new file mode 100644 index 000000000000..2e395bd54cac --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_batch_embed_contents_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_BatchEmbedContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = client.batch_embed_contents(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_BatchEmbedContents_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_count_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_count_tokens_async.py new file mode 100644 index 000000000000..cc99212bd07f --- /dev/null +++ 
b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_count_tokens_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_CountTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_count_tokens(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = await client.count_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_CountTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_count_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_count_tokens_sync.py new file mode 100644 index 000000000000..0eb80d8eb4ef --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_count_tokens_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_CountTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_count_tokens(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = client.count_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_CountTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_embed_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_embed_content_async.py new file mode 100644 index 000000000000..c48444f44753 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_embed_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_EmbedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_embed_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_EmbedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_embed_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_embed_content_sync.py new file mode 100644 index 000000000000..a4c8aa05edf2 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_embed_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_EmbedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_embed_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = client.embed_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_EmbedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_generate_content_async.py new file mode 100644 index 000000000000..2a5f898ba337 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_generate_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_GenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = await client.generate_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_GenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_generate_content_sync.py new file mode 100644 index 000000000000..b2ab403f2565 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_generate_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_GenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = client.generate_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_GenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_stream_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_stream_generate_content_async.py new file mode 100644 index 000000000000..e17c7b39be8a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_stream_generate_content_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_StreamGenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = await client.stream_generate_content(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_StreamGenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_stream_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_stream_generate_content_sync.py new file mode 100644 index 000000000000..ef1332ba1925 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_generative_service_stream_generate_content_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for StreamGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_GenerativeService_StreamGenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = client.stream_generate_content(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END generativelanguage_v1_generated_GenerativeService_StreamGenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_get_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_get_model_async.py new file mode 100644 index 000000000000..536a666693ea --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_get_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_ModelService_GetModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_get_model(): + # Create a client + client = generativelanguage_v1.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_ModelService_GetModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_get_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_get_model_sync.py new file mode 100644 index 000000000000..f9c1bca11861 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_get_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_ModelService_GetModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_get_model(): + # Create a client + client = generativelanguage_v1.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1_generated_ModelService_GetModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_list_models_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_list_models_async.py new file mode 100644 index 000000000000..daeefd8eb668 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_list_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_ModelService_ListModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +async def sample_list_models(): + # Create a client + client = generativelanguage_v1.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1_generated_ModelService_ListModels_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_list_models_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_list_models_sync.py new file mode 100644 index 000000000000..45f55d130e67 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1_generated_model_service_list_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1_generated_ModelService_ListModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1 + + +def sample_list_models(): + # Create a client + client = generativelanguage_v1.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1_generated_ModelService_ListModels_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_count_message_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_count_message_tokens_async.py new file mode 100644 index 000000000000..05f4ed4b23cd --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_count_message_tokens_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountMessageTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_DiscussService_CountMessageTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_message_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_DiscussService_CountMessageTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_count_message_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_count_message_tokens_sync.py new file mode 100644 index 000000000000..1a8a1fa7fccf --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_count_message_tokens_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for CountMessageTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_DiscussService_CountMessageTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_message_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_DiscussService_CountMessageTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_generate_message_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_generate_message_async.py new file mode 100644 index 000000000000..adb768c3fb01 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_generate_message_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_DiscussService_GenerateMessage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_message(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_DiscussService_GenerateMessage_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_generate_message_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_generate_message_sync.py new file mode 100644 index 000000000000..eb3418bd36e7 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_discuss_service_generate_message_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_DiscussService_GenerateMessage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_generate_message(): + # Create a client + client = generativelanguage_v1beta.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1beta.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_message(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_DiscussService_GenerateMessage_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_batch_embed_contents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_batch_embed_contents_async.py new file mode 100644 index 000000000000..857072a8535f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_batch_embed_contents_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_BatchEmbedContents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1beta.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = await client.batch_embed_contents(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_BatchEmbedContents_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_batch_embed_contents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_batch_embed_contents_sync.py new file mode 100644 index 000000000000..820b7e9b735a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_batch_embed_contents_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_BatchEmbedContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1beta.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = client.batch_embed_contents(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_BatchEmbedContents_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_count_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_count_tokens_async.py new file mode 100644 index 000000000000..f87099aac576 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_count_tokens_async.py @@ -0,0 +1,52 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_CountTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_count_tokens(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = await client.count_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_CountTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_count_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_count_tokens_sync.py new file mode 100644 index 000000000000..6ae5aa75fb73 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_count_tokens_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_CountTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_count_tokens(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = client.count_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_CountTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_embed_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_embed_content_async.py new file mode 100644 index 000000000000..391accc15459 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_embed_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_EmbedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_embed_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_EmbedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_embed_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_embed_content_sync.py new file mode 100644 index 000000000000..f0383ccc4461 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_embed_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_EmbedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_embed_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = client.embed_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_EmbedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_answer_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_answer_async.py new file mode 100644 index 000000000000..14dc2e490fb9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_answer_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAnswer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_GenerateAnswer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_generate_answer(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = await client.generate_answer(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_GenerateAnswer_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_answer_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_answer_sync.py new file mode 100644 index 000000000000..74ea769614af --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_answer_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateAnswer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_GenerateAnswer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_generate_answer(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = client.generate_answer(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_GenerateAnswer_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_content_async.py new file mode 100644 index 000000000000..9445101240f2 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_GenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = await client.generate_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_GenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_content_sync.py new file mode 100644 index 000000000000..c8e66df83bec --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_generate_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_GenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = client.generate_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_GenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_stream_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_stream_generate_content_async.py new file mode 100644 index 000000000000..38286cae7948 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_stream_generate_content_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_StreamGenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = await client.stream_generate_content(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_StreamGenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_stream_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_stream_generate_content_sync.py new file mode 100644 index 000000000000..e37f9a08530a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_generative_service_stream_generate_content_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for StreamGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_GenerativeService_StreamGenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1beta.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = client.stream_generate_content(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END generativelanguage_v1beta_generated_GenerativeService_StreamGenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_create_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_create_tuned_model_async.py new file mode 100644 index 000000000000..2262962029f1 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_create_tuned_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_CreateTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.CreateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_CreateTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_create_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_create_tuned_model_sync.py new file mode 100644 index 000000000000..6df1d7f57550 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_create_tuned_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_CreateTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.CreateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_CreateTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_delete_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_delete_tuned_model_async.py new file mode 100644 index 000000000000..40e6f6769563 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_delete_tuned_model_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_DeleteTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + await client.delete_tuned_model(request=request) + + +# [END generativelanguage_v1beta_generated_ModelService_DeleteTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_delete_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_delete_tuned_model_sync.py new file mode 100644 index 000000000000..295bdc565b42 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_delete_tuned_model_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_DeleteTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + client.delete_tuned_model(request=request) + + +# [END generativelanguage_v1beta_generated_ModelService_DeleteTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_model_async.py new file mode 100644 index 000000000000..0bcb3e02a62d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_GetModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_get_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_GetModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_model_sync.py new file mode 100644 index 000000000000..5c81a46e8fdc --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_GetModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_get_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_GetModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_tuned_model_async.py new file mode 100644 index 000000000000..450f0cad30d3 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_tuned_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_GetTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_GetTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_tuned_model_sync.py new file mode 100644 index 000000000000..67682c9dd23a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_get_tuned_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_GetTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_GetTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_models_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_models_async.py new file mode 100644 index 000000000000..caf3332547cb --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_ListModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_ListModels_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_models_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_models_sync.py new file mode 100644 index 000000000000..804db1790e46 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_ListModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_ListModels_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_tuned_models_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_tuned_models_async.py new file mode 100644 index 000000000000..cc7a15078df8 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_tuned_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTunedModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_ListTunedModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_ListTunedModels_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_tuned_models_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_tuned_models_sync.py new file mode 100644 index 000000000000..c9562f6d5e4f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_list_tuned_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTunedModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_ListTunedModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_ListTunedModels_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_update_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_update_tuned_model_async.py new file mode 100644 index 000000000000..7c5f9b7bb810 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_update_tuned_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_UpdateTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceAsyncClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.UpdateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + response = await client.update_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_UpdateTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_update_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_update_tuned_model_sync.py new file mode 100644 index 000000000000..c168ff81a340 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_model_service_update_tuned_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_ModelService_UpdateTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1beta.ModelServiceClient() + + # Initialize request argument(s) + tuned_model = generativelanguage_v1beta.TunedModel() + tuned_model.tuning_task.training_data.examples.examples.text_input = "text_input_value" + tuned_model.tuning_task.training_data.examples.examples.output = "output_value" + + request = generativelanguage_v1beta.UpdateTunedModelRequest( + tuned_model=tuned_model, + ) + + # Make the request + response = client.update_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_ModelService_UpdateTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_create_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_create_permission_async.py new file mode 100644 index 
000000000000..aaa3e5de8e79 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_create_permission_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_CreatePermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_create_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_CreatePermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_create_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_create_permission_sync.py new file mode 100644 index 000000000000..c39e3ca1a58e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_create_permission_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_CreatePermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_create_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_CreatePermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_delete_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_delete_permission_async.py new file mode 100644 index 000000000000..392d2bb22836 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_delete_permission_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_DeletePermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_delete_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + await client.delete_permission(request=request) + + +# [END generativelanguage_v1beta_generated_PermissionService_DeletePermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_delete_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_delete_permission_sync.py new file mode 100644 index 000000000000..4ee5fa337b0b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_delete_permission_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_DeletePermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_delete_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + client.delete_permission(request=request) + + +# [END generativelanguage_v1beta_generated_PermissionService_DeletePermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_get_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_get_permission_async.py new file mode 100644 index 000000000000..efe727b0d7bf --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_get_permission_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_GetPermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_get_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_GetPermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_get_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_get_permission_sync.py new file mode 100644 index 000000000000..7b6a4653b6d1 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_get_permission_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_GetPermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_get_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = client.get_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_GetPermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_list_permissions_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_list_permissions_async.py new file mode 100644 index 000000000000..4e2ee7ef39de --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_list_permissions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_ListPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_permissions(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_ListPermissions_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_list_permissions_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_list_permissions_sync.py new file mode 100644 index 000000000000..5caf2589e792 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_list_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_ListPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_permissions(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_ListPermissions_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_transfer_ownership_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_transfer_ownership_async.py new file mode 100644 index 000000000000..0f7a3d028beb --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_transfer_ownership_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TransferOwnership +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_TransferOwnership_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = await client.transfer_ownership(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_TransferOwnership_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_transfer_ownership_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_transfer_ownership_sync.py new file mode 100644 index 000000000000..8fffd9821b81 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_transfer_ownership_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for TransferOwnership +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_TransferOwnership_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = client.transfer_ownership(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_TransferOwnership_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_update_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_update_permission_async.py new file mode 100644 index 000000000000..a9e926b06b5d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_update_permission_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_UpdatePermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_update_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdatePermissionRequest( + ) + + # Make the request + response = await client.update_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_UpdatePermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_update_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_update_permission_sync.py new file mode 100644 index 000000000000..04e8ad9e9d90 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_permission_service_update_permission_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PermissionService_UpdatePermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_update_permission(): + # Create a client + client = generativelanguage_v1beta.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdatePermissionRequest( + ) + + # Make the request + response = client.update_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PermissionService_UpdatePermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_async.py new file mode 100644 index 000000000000..103d94d9db16 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_BatchCreateChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_create_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_BatchCreateChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_sync.py new file mode 100644 index 000000000000..d5af383da5d6 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_BatchCreateChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_create_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_BatchCreateChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_async.py new file mode 100644 index 000000000000..8a68d6933cff --- /dev/null +++ 
b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_BatchDeleteChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1beta.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + await client.batch_delete_chunks(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_BatchDeleteChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_sync.py new file mode 100644 index 000000000000..df9d3abd31ca --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchDeleteChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_BatchDeleteChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1beta.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + client.batch_delete_chunks(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_BatchDeleteChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_async.py new file mode 100644 index 000000000000..64595c08737f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_BatchUpdateChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_update_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_BatchUpdateChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_sync.py new file mode 100644 index 000000000000..a659f9a4b0f8 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for BatchUpdateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_BatchUpdateChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1beta.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_update_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_BatchUpdateChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_chunk_async.py new file mode 100644 index 000000000000..1d058bd2282f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_chunk_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_CreateChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_create_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = await client.create_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_CreateChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_chunk_sync.py new file mode 100644 index 000000000000..19f079d74dc3 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_chunk_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_CreateChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_create_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = client.create_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_CreateChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_corpus_async.py new file mode 100644 index 000000000000..c2896649487c --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_corpus_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_CreateCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_create_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCorpusRequest( + ) + + # Make the request + response = await client.create_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_CreateCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_corpus_sync.py new file mode 100644 index 000000000000..5527d28e3f39 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_corpus_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_CreateCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_create_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateCorpusRequest( + ) + + # Make the request + response = client.create_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_CreateCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_document_async.py new file mode 100644 index 000000000000..f90311756f25 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_CreateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_create_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_CreateDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_document_sync.py new file mode 100644 index 000000000000..c47e7d8cd2b8 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_create_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_CreateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_create_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_CreateDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_chunk_async.py new file mode 100644 index 000000000000..05dcc3d8258a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_chunk_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_DeleteChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + await client.delete_chunk(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_DeleteChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_chunk_sync.py new file mode 100644 index 000000000000..23877de88d6e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_chunk_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_DeleteChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + client.delete_chunk(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_DeleteChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_corpus_async.py new file mode 100644 index 000000000000..f09bc255c979 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_corpus_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_DeleteCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + await client.delete_corpus(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_DeleteCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_corpus_sync.py new file mode 100644 index 000000000000..931df87678a8 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_corpus_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_DeleteCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + client.delete_corpus(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_DeleteCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_document_async.py new file mode 100644 index 000000000000..040e1e686eb6 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_document_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_DeleteDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_delete_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_DeleteDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_document_sync.py new file mode 100644 index 000000000000..46c027046aa3 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_delete_document_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_DeleteDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_delete_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + +# [END generativelanguage_v1beta_generated_RetrieverService_DeleteDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_chunk_async.py new file mode 100644 index 000000000000..deb7be6f8ade --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_chunk_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_GetChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_get_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_GetChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_chunk_sync.py new file mode 100644 index 000000000000..7f6969ccbfe2 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_chunk_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_GetChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_get_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = client.get_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_GetChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_corpus_async.py new file mode 100644 index 000000000000..fe453dd4b86a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_corpus_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_GetCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_get_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_GetCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_corpus_sync.py new file mode 100644 index 000000000000..cca86a7eb166 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_corpus_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_GetCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_get_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = client.get_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_GetCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_document_async.py new file mode 100644 index 000000000000..d95ee2bf514e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_GetDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_get_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_GetDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_document_sync.py new file mode 100644 index 000000000000..030a8275119f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_get_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_GetDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_get_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_GetDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_chunks_async.py new file mode 100644 index 000000000000..5dcfaf1c6e67 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_chunks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_ListChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_ListChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_chunks_sync.py new file mode 100644 index 000000000000..3fa75b624f77 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_chunks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_ListChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_chunks(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_ListChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_corpora_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_corpora_async.py new file mode 100644 index 000000000000..ad59fb168c4a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_corpora_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCorpora +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_ListCorpora_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_corpora(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_ListCorpora_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_corpora_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_corpora_sync.py new file mode 100644 index 000000000000..4bfd3bf558da --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_corpora_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCorpora +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_ListCorpora_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_corpora(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_ListCorpora_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_documents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_documents_async.py new file mode 100644 index 000000000000..d9502d1da332 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_documents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_ListDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_list_documents(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_ListDocuments_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_documents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_documents_sync.py new file mode 100644 index 000000000000..dbfb2cafe0e9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_list_documents_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_ListDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_list_documents(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_ListDocuments_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_corpus_async.py new file mode 100644 index 000000000000..c75fcdb56251 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_corpus_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_QueryCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_query_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_QueryCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_corpus_sync.py new file mode 100644 index 000000000000..ae3815bca1a0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_corpus_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_QueryCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_query_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_QueryCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_document_async.py new file mode 100644 index 000000000000..09b1c3c683ce --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_document_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_QueryDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_query_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_QueryDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_document_sync.py new file mode 100644 index 000000000000..e5caba8a23ad --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_query_document_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_QueryDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_query_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_QueryDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_chunk_async.py new file mode 100644 index 000000000000..d3e54b8aadb4 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_chunk_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_UpdateChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_update_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = await client.update_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_UpdateChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_chunk_sync.py new file mode 100644 index 000000000000..47b7566cca7e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_chunk_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_UpdateChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_update_chunk(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1beta.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1beta.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = client.update_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_UpdateChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_corpus_async.py new file mode 100644 index 000000000000..5c036d4ae8c7 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_corpus_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_UpdateCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_update_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCorpusRequest( + ) + + # Make the request + response = await client.update_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_UpdateCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_corpus_sync.py new file mode 100644 index 000000000000..a587467ffc75 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_corpus_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_UpdateCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_update_corpus(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateCorpusRequest( + ) + + # Make the request + response = client.update_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_UpdateCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_document_async.py new file mode 100644 index 000000000000..94a1218df8b7 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_document_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_UpdateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_update_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_UpdateDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_document_sync.py new file mode 100644 index 000000000000..e5b6912241e1 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_retriever_service_update_document_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_RetrieverService_UpdateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_update_document(): + # Create a client + client = generativelanguage_v1beta.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_RetrieverService_UpdateDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_batch_embed_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_batch_embed_text_async.py new file mode 100644 index 000000000000..a8c997c7d26b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_batch_embed_text_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_BatchEmbedText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.batch_embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_BatchEmbedText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_batch_embed_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_batch_embed_text_sync.py new file mode 100644 index 000000000000..c63ae3710337 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_batch_embed_text_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_BatchEmbedText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.batch_embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_BatchEmbedText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_count_text_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_count_text_tokens_async.py new file mode 100644 index 000000000000..b803a7f4a96b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_count_text_tokens_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTextTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_CountTextTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_text_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_CountTextTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_count_text_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_count_text_tokens_sync.py new file mode 100644 index 000000000000..4e79ec849d76 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_count_text_tokens_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CountTextTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_CountTextTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_text_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_CountTextTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_embed_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_embed_text_async.py new file mode 100644 index 000000000000..69b05bce6998 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_embed_text_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_EmbedText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_EmbedText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_embed_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_embed_text_sync.py new file mode 100644 index 000000000000..71b17a5ea390 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_embed_text_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_EmbedText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_embed_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1beta.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_EmbedText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_generate_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_generate_text_async.py new file mode 100644 index 000000000000..0e1630d6729d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_generate_text_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_GenerateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_GenerateText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_generate_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_generate_text_sync.py new file mode 100644 index 000000000000..4b86847af94c --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_text_service_generate_text_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_TextService_GenerateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_generate_text(): + # Create a client + client = generativelanguage_v1beta.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1beta.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1beta.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_TextService_GenerateText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json new file mode 100644 index 000000000000..42e70ad8eb68 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -0,0 +1,1190 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.ai.generativelanguage.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-ai-generativelanguage", + 
"version": "0.5.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient.batch_embed_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.BatchEmbedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BatchEmbedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.BatchEmbedContentsRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.ai.generativelanguage_v1.types.EmbedContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.BatchEmbedContentsResponse", + "shortName": "batch_embed_contents" + }, + "description": "Sample for BatchEmbedContents", + "file": "generativelanguage_v1_generated_generative_service_batch_embed_contents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_BatchEmbedContents_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_batch_embed_contents_async.py" 
+ }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient.batch_embed_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.BatchEmbedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BatchEmbedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.BatchEmbedContentsRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.ai.generativelanguage_v1.types.EmbedContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.BatchEmbedContentsResponse", + "shortName": "batch_embed_contents" + }, + "description": "Sample for BatchEmbedContents", + "file": "generativelanguage_v1_generated_generative_service_batch_embed_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_BatchEmbedContents_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_batch_embed_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient.count_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.CountTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "CountTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.CountTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.CountTokensResponse", + "shortName": "count_tokens" + }, + "description": "Sample for CountTokens", + "file": "generativelanguage_v1_generated_generative_service_count_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_CountTokens_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_count_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + 
"fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient.count_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.CountTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "CountTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.CountTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.CountTokensResponse", + "shortName": "count_tokens" + }, + "description": "Sample for CountTokens", + "file": "generativelanguage_v1_generated_generative_service_count_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_CountTokens_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_count_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient.embed_content", + "method": { + "fullName": 
"google.ai.generativelanguage.v1.GenerativeService.EmbedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "EmbedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.EmbedContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "content", + "type": "google.ai.generativelanguage_v1.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.EmbedContentResponse", + "shortName": "embed_content" + }, + "description": "Sample for EmbedContent", + "file": "generativelanguage_v1_generated_generative_service_embed_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_EmbedContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_embed_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient.embed_content", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.EmbedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + 
"shortName": "GenerativeService" + }, + "shortName": "EmbedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.EmbedContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "content", + "type": "google.ai.generativelanguage_v1.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.EmbedContentResponse", + "shortName": "embed_content" + }, + "description": "Sample for EmbedContent", + "file": "generativelanguage_v1_generated_generative_service_embed_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_EmbedContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_embed_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient.generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.GenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateContent" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.ai.generativelanguage_v1.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.GenerateContentResponse", + "shortName": "generate_content" + }, + "description": "Sample for GenerateContent", + "file": "generativelanguage_v1_generated_generative_service_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_GenerateContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient.generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.GenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.GenerateContentRequest" + }, + { + "name": "model", + "type": 
"str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.GenerateContentResponse", + "shortName": "generate_content" + }, + "description": "Sample for GenerateContent", + "file": "generativelanguage_v1_generated_generative_service_generate_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_GenerateContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceAsyncClient.stream_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.StreamGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "StreamGenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": 
"MutableSequence[google.ai.generativelanguage_v1.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]", + "shortName": "stream_generate_content" + }, + "description": "Sample for StreamGenerateContent", + "file": "generativelanguage_v1_generated_generative_service_stream_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_StreamGenerateContent_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_stream_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1.GenerativeServiceClient.stream_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService.StreamGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "StreamGenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": 
"MutableSequence[google.ai.generativelanguage_v1.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.ai.generativelanguage_v1.types.GenerateContentResponse]", + "shortName": "stream_generate_content" + }, + "description": "Sample for StreamGenerateContent", + "file": "generativelanguage_v1_generated_generative_service_stream_generate_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_GenerativeService_StreamGenerateContent_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_generative_service_stream_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.ModelServiceAsyncClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1_generated_model_service_get_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_ModelService_GetModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_model_service_get_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1.ModelServiceClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1_generated_model_service_get_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1_generated_ModelService_GetModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_model_service_get_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1.ModelServiceAsyncClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.services.model_service.pagers.ListModelsAsyncPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "generativelanguage_v1_generated_model_service_list_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_ModelService_ListModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_model_service_list_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1.ModelServiceClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1.services.model_service.pagers.ListModelsPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "generativelanguage_v1_generated_model_service_list_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1_generated_ModelService_ListModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1_generated_model_service_list_models_sync.py" + } + ] +} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json new file mode 100644 index 000000000000..1d7572058efc --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -0,0 +1,7440 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.ai.generativelanguage.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-ai-generativelanguage", + "version": "0.5.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceAsyncClient", + "shortName": "DiscussServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceAsyncClient.count_message_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService.CountMessageTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "CountMessageTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CountMessageTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.MessagePrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CountMessageTokensResponse", + "shortName": "count_message_tokens" + }, + "description": "Sample 
for CountMessageTokens", + "file": "generativelanguage_v1beta_generated_discuss_service_count_message_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_DiscussService_CountMessageTokens_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_discuss_service_count_message_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceClient", + "shortName": "DiscussServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceClient.count_message_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService.CountMessageTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "CountMessageTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CountMessageTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.MessagePrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CountMessageTokensResponse", + "shortName": "count_message_tokens" + }, + "description": "Sample for CountMessageTokens", + "file": 
"generativelanguage_v1beta_generated_discuss_service_count_message_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_DiscussService_CountMessageTokens_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_discuss_service_count_message_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceAsyncClient", + "shortName": "DiscussServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceAsyncClient.generate_message", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService.GenerateMessage", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "GenerateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateMessageRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.MessagePrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1beta.types.GenerateMessageResponse", + "shortName": "generate_message" + }, + "description": "Sample for GenerateMessage", + "file": "generativelanguage_v1beta_generated_discuss_service_generate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_DiscussService_GenerateMessage_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_discuss_service_generate_message_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceClient", + "shortName": "DiscussServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.DiscussServiceClient.generate_message", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService.GenerateMessage", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "GenerateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateMessageRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.MessagePrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.GenerateMessageResponse", + "shortName": "generate_message" + }, + "description": "Sample for GenerateMessage", + "file": "generativelanguage_v1beta_generated_discuss_service_generate_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_DiscussService_GenerateMessage_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_discuss_service_generate_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient.batch_embed_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.BatchEmbedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BatchEmbedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchEmbedContentsRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchEmbedContentsResponse", + "shortName": "batch_embed_contents" + }, + "description": "Sample for BatchEmbedContents", + "file": "generativelanguage_v1beta_generated_generative_service_batch_embed_contents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_BatchEmbedContents_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_batch_embed_contents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient.batch_embed_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.BatchEmbedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BatchEmbedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchEmbedContentsRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchEmbedContentsResponse", + "shortName": "batch_embed_contents" + }, + "description": "Sample for BatchEmbedContents", + "file": "generativelanguage_v1beta_generated_generative_service_batch_embed_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_BatchEmbedContents_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_batch_embed_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient.count_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.CountTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "CountTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CountTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.ai.generativelanguage_v1beta.types.CountTokensResponse", + "shortName": "count_tokens" + }, + "description": "Sample for CountTokens", + "file": "generativelanguage_v1beta_generated_generative_service_count_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_CountTokens_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_count_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient.count_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.CountTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "CountTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CountTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CountTokensResponse", + "shortName": "count_tokens" + }, 
+ "description": "Sample for CountTokens", + "file": "generativelanguage_v1beta_generated_generative_service_count_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_CountTokens_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_count_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient.embed_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.EmbedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "EmbedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.EmbedContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "content", + "type": "google.ai.generativelanguage_v1beta.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.EmbedContentResponse", + "shortName": "embed_content" + }, + "description": "Sample for EmbedContent", + "file": 
"generativelanguage_v1beta_generated_generative_service_embed_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_EmbedContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_embed_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient.embed_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.EmbedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "EmbedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.EmbedContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "content", + "type": "google.ai.generativelanguage_v1beta.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.EmbedContentResponse", + "shortName": "embed_content" + }, + "description": "Sample for EmbedContent", + "file": "generativelanguage_v1beta_generated_generative_service_embed_content_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_EmbedContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_embed_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient.generate_answer", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.GenerateAnswer", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateAnswer" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "safety_settings", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]" + }, + { + "name": "answer_style", + "type": "google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse", + "shortName": 
"generate_answer" + }, + "description": "Sample for GenerateAnswer", + "file": "generativelanguage_v1beta_generated_generative_service_generate_answer_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_GenerateAnswer_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_generate_answer_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient.generate_answer", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.GenerateAnswer", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateAnswer" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "safety_settings", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]" + }, + { + "name": "answer_style", + "type": "google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse", + "shortName": "generate_answer" + }, + "description": "Sample for GenerateAnswer", + "file": "generativelanguage_v1beta_generated_generative_service_generate_answer_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_GenerateAnswer_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_generate_answer_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient.generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.GenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.GenerateContentResponse", + "shortName": "generate_content" + }, + "description": "Sample for GenerateContent", + "file": "generativelanguage_v1beta_generated_generative_service_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_GenerateContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient.generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.GenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1beta.types.GenerateContentResponse", + "shortName": "generate_content" + }, + "description": "Sample for GenerateContent", + "file": "generativelanguage_v1beta_generated_generative_service_generate_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_GenerateContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceAsyncClient.stream_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.StreamGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "StreamGenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"Iterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]", + "shortName": "stream_generate_content" + }, + "description": "Sample for StreamGenerateContent", + "file": "generativelanguage_v1beta_generated_generative_service_stream_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_StreamGenerateContent_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_stream_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.GenerativeServiceClient.stream_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService.StreamGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "StreamGenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1beta.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"Iterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]", + "shortName": "stream_generate_content" + }, + "description": "Sample for StreamGenerateContent", + "file": "generativelanguage_v1beta_generated_generative_service_stream_generate_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_GenerativeService_StreamGenerateContent_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_generative_service_stream_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.create_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.CreateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "CreateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1beta.types.TunedModel" + }, + { + "name": "tuned_model_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "create_tuned_model" + }, + "description": "Sample for CreateTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_create_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_CreateTunedModel_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_create_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.create_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.CreateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "CreateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1beta.types.TunedModel" + }, + { + "name": "tuned_model_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_tuned_model" + }, + "description": "Sample for 
CreateTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_create_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_CreateTunedModel_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_create_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.delete_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.DeleteTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "DeleteTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tuned_model" + }, + "description": "Sample for DeleteTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_delete_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_DeleteTunedModel_async", + "segments": [ + 
{ + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_delete_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.delete_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.DeleteTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "DeleteTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tuned_model" + }, + "description": "Sample for DeleteTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_delete_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_DeleteTunedModel_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_delete_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1beta_generated_model_service_get_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_GetModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_get_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": 
"ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1beta_generated_model_service_get_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_GetModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_get_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.get_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.GetTunedModel", + "service": { + "fullName": 
"google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.TunedModel", + "shortName": "get_tuned_model" + }, + "description": "Sample for GetTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_get_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_GetTunedModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_get_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.get_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.GetTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetTunedModelRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.TunedModel", + "shortName": "get_tuned_model" + }, + "description": "Sample for GetTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_get_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_GetTunedModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_get_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.model_service.pagers.ListModelsAsyncPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "generativelanguage_v1beta_generated_model_service_list_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_ListModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_list_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.model_service.pagers.ListModelsPager", + "shortName": "list_models" + }, + "description": 
"Sample for ListModels", + "file": "generativelanguage_v1beta_generated_model_service_list_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_ListModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_list_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.list_tuned_models", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.ListTunedModels", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListTunedModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListTunedModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.model_service.pagers.ListTunedModelsAsyncPager", + "shortName": "list_tuned_models" + }, + "description": "Sample for ListTunedModels", + "file": "generativelanguage_v1beta_generated_model_service_list_tuned_models_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_ListTunedModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_list_tuned_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.list_tuned_models", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.ListTunedModels", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListTunedModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListTunedModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.model_service.pagers.ListTunedModelsPager", + "shortName": "list_tuned_models" + }, + "description": "Sample for ListTunedModels", + "file": "generativelanguage_v1beta_generated_model_service_list_tuned_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_ListTunedModels_sync", + "segments": [ + { + 
"end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_list_tuned_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.update_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.UpdateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "UpdateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1beta.types.TunedModel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.TunedModel", + "shortName": "update_tuned_model" + }, + "description": "Sample for UpdateTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_update_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_UpdateTunedModel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + 
"end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_update_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.ModelServiceClient.update_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService.UpdateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.ModelService", + "shortName": "ModelService" + }, + "shortName": "UpdateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1beta.types.TunedModel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.TunedModel", + "shortName": "update_tuned_model" + }, + "description": "Sample for UpdateTunedModel", + "file": "generativelanguage_v1beta_generated_model_service_update_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_ModelService_UpdateTunedModel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_model_service_update_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient.create_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.CreatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "CreatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreatePermissionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1beta.types.Permission" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Permission", + "shortName": "create_permission" + }, + "description": "Sample for CreatePermission", + "file": "generativelanguage_v1beta_generated_permission_service_create_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_CreatePermission_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_create_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient.create_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.CreatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "CreatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreatePermissionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1beta.types.Permission" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Permission", + "shortName": "create_permission" + }, + "description": "Sample for CreatePermission", + "file": "generativelanguage_v1beta_generated_permission_service_create_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_CreatePermission_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_create_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient.delete_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.DeletePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "DeletePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeletePermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_permission" + }, + "description": "Sample for DeletePermission", + "file": "generativelanguage_v1beta_generated_permission_service_delete_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_DeletePermission_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_delete_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient.delete_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.DeletePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "DeletePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeletePermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_permission" + }, + "description": "Sample for DeletePermission", + "file": "generativelanguage_v1beta_generated_permission_service_delete_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_DeletePermission_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_delete_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient.get_permission", + "method": { + "fullName": 
"google.ai.generativelanguage.v1beta.PermissionService.GetPermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "GetPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Permission", + "shortName": "get_permission" + }, + "description": "Sample for GetPermission", + "file": "generativelanguage_v1beta_generated_permission_service_get_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_GetPermission_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_get_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient.get_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.GetPermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": 
"GetPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Permission", + "shortName": "get_permission" + }, + "description": "Sample for GetPermission", + "file": "generativelanguage_v1beta_generated_permission_service_get_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_GetPermission_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_get_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient.list_permissions", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.ListPermissions", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "ListPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListPermissionsRequest" + }, + { + "name": "parent", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.permission_service.pagers.ListPermissionsAsyncPager", + "shortName": "list_permissions" + }, + "description": "Sample for ListPermissions", + "file": "generativelanguage_v1beta_generated_permission_service_list_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_ListPermissions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_list_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient.list_permissions", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.ListPermissions", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "ListPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListPermissionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.permission_service.pagers.ListPermissionsPager", + "shortName": "list_permissions" + }, + "description": "Sample for ListPermissions", + "file": "generativelanguage_v1beta_generated_permission_service_list_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_ListPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_list_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient.transfer_ownership", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.TransferOwnership", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "TransferOwnership" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.TransferOwnershipRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.TransferOwnershipResponse", + "shortName": 
"transfer_ownership" + }, + "description": "Sample for TransferOwnership", + "file": "generativelanguage_v1beta_generated_permission_service_transfer_ownership_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_TransferOwnership_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_transfer_ownership_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient.transfer_ownership", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.TransferOwnership", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "TransferOwnership" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.TransferOwnershipRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.TransferOwnershipResponse", + "shortName": "transfer_ownership" + }, + "description": "Sample for TransferOwnership", + "file": "generativelanguage_v1beta_generated_permission_service_transfer_ownership_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_TransferOwnership_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_transfer_ownership_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceAsyncClient.update_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.UpdatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "UpdatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdatePermissionRequest" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1beta.types.Permission" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Permission", + "shortName": "update_permission" + }, + "description": "Sample for UpdatePermission", + "file": "generativelanguage_v1beta_generated_permission_service_update_permission_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PermissionService_UpdatePermission_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_update_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PermissionServiceClient.update_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService.UpdatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "UpdatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdatePermissionRequest" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1beta.types.Permission" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Permission", + "shortName": "update_permission" + }, + "description": "Sample for UpdatePermission", + "file": "generativelanguage_v1beta_generated_permission_service_update_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1beta_generated_PermissionService_UpdatePermission_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_permission_service_update_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.batch_create_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.BatchCreateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchCreateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchCreateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchCreateChunksResponse", + "shortName": "batch_create_chunks" + }, + "description": "Sample for BatchCreateChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_BatchCreateChunks_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.batch_create_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.BatchCreateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchCreateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchCreateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchCreateChunksResponse", + "shortName": "batch_create_chunks" + }, + "description": "Sample for BatchCreateChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_BatchCreateChunks_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_batch_create_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.batch_delete_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.BatchDeleteChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchDeleteChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchDeleteChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "batch_delete_chunks" + }, + "description": "Sample for BatchDeleteChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_BatchDeleteChunks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.batch_delete_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.BatchDeleteChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchDeleteChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchDeleteChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "batch_delete_chunks" + }, + "description": "Sample for BatchDeleteChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_BatchDeleteChunks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_batch_delete_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.batch_update_chunks", + "method": { + "fullName": 
"google.ai.generativelanguage.v1beta.RetrieverService.BatchUpdateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchUpdateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchUpdateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchUpdateChunksResponse", + "shortName": "batch_update_chunks" + }, + "description": "Sample for BatchUpdateChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_BatchUpdateChunks_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.batch_update_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.BatchUpdateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": 
"BatchUpdateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchUpdateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchUpdateChunksResponse", + "shortName": "batch_update_chunks" + }, + "description": "Sample for BatchUpdateChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_BatchUpdateChunks_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_batch_update_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.create_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.CreateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateChunkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "chunk", + "type": "google.ai.generativelanguage_v1beta.types.Chunk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Chunk", + "shortName": "create_chunk" + }, + "description": "Sample for CreateChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_create_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_CreateChunk_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_create_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.create_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.CreateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateChunkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1beta.types.Chunk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Chunk", + "shortName": "create_chunk" + }, + "description": "Sample for CreateChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_create_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_CreateChunk_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_create_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.create_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.CreateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1beta.types.Corpus" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1beta.types.Corpus", + "shortName": "create_corpus" + }, + "description": "Sample for CreateCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_create_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_CreateCorpus_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_create_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.create_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.CreateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1beta.types.Corpus" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Corpus", + "shortName": "create_corpus" + }, + "description": "Sample for CreateCorpus", + "file": 
"generativelanguage_v1beta_generated_retriever_service_create_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_CreateCorpus_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_create_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.create_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.CreateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1beta.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_create_document_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_CreateDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.create_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.CreateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1beta.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1beta_generated_RetrieverService_CreateDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.delete_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.DeleteChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_chunk" + }, + "description": "Sample for DeleteChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_delete_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_DeleteChunk_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_delete_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.delete_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.DeleteChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_chunk" + }, + "description": "Sample for DeleteChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_delete_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_DeleteChunk_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_delete_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { 
+ "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.delete_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.DeleteCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_corpus" + }, + "description": "Sample for DeleteCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_delete_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_DeleteCorpus_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_delete_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.delete_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.DeleteCorpus", + 
"service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_corpus" + }, + "description": "Sample for DeleteCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_delete_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_DeleteCorpus_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_delete_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.delete_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.DeleteDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteDocumentRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.delete_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.DeleteDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": 
"generativelanguage_v1beta_generated_retriever_service_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.get_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.GetChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Chunk", + "shortName": "get_chunk" + }, + "description": "Sample for GetChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_get_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_GetChunk_async", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_get_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.get_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.GetChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Chunk", + "shortName": "get_chunk" + }, + "description": "Sample for GetChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_get_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_GetChunk_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_get_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.get_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.GetCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Corpus", + "shortName": "get_corpus" + }, + "description": "Sample for GetCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_get_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_GetCorpus_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_get_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.get_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.GetCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Corpus", + "shortName": "get_corpus" + }, + "description": "Sample for GetCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_get_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_GetCorpus_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_get_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.get_document", + "method": { 
+ "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.GetDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.get_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.GetDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetDocument" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.list_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.ListChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListChunksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListChunksAsyncPager", + "shortName": "list_chunks" + }, + "description": "Sample for ListChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_list_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_ListChunks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_list_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.list_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.ListChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListChunksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListChunksPager", + "shortName": "list_chunks" + }, + "description": "Sample for ListChunks", + "file": "generativelanguage_v1beta_generated_retriever_service_list_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_ListChunks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_list_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.list_corpora", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.ListCorpora", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListCorpora" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListCorporaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListCorporaAsyncPager", + "shortName": "list_corpora" + }, + "description": "Sample for ListCorpora", + "file": 
"generativelanguage_v1beta_generated_retriever_service_list_corpora_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_ListCorpora_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_list_corpora_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.list_corpora", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.ListCorpora", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListCorpora" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListCorporaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListCorporaPager", + "shortName": "list_corpora" + }, + "description": "Sample for ListCorpora", + "file": "generativelanguage_v1beta_generated_retriever_service_list_corpora_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_ListCorpora_sync", + "segments": [ + { + "end": 
51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_list_corpora_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.list_documents", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.ListDocuments", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "generativelanguage_v1beta_generated_retriever_service_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.list_documents", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.ListDocuments", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.services.retriever_service.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "generativelanguage_v1beta_generated_retriever_service_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.query_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.QueryCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.QueryCorpusRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.QueryCorpusResponse", + "shortName": "query_corpus" + }, + "description": "Sample for QueryCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_query_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_QueryCorpus_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_query_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.query_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.QueryCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.QueryCorpusRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.QueryCorpusResponse", + "shortName": "query_corpus" + }, + "description": "Sample for QueryCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_query_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_QueryCorpus_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_query_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.query_document", + "method": { + "fullName": 
"google.ai.generativelanguage.v1beta.RetrieverService.QueryDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.QueryDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.QueryDocumentResponse", + "shortName": "query_document" + }, + "description": "Sample for QueryDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_query_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_QueryDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_query_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.query_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.QueryDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryDocument" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.ai.generativelanguage_v1beta.types.QueryDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.QueryDocumentResponse", + "shortName": "query_document" + }, + "description": "Sample for QueryDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_query_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_QueryDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_query_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.update_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.UpdateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateChunkRequest" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1beta.types.Chunk" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Chunk", + "shortName": "update_chunk" + }, + "description": "Sample for UpdateChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_update_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_UpdateChunk_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_update_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.update_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.UpdateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateChunkRequest" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1beta.types.Chunk" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Chunk", + "shortName": "update_chunk" + }, + "description": "Sample for UpdateChunk", + "file": "generativelanguage_v1beta_generated_retriever_service_update_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_UpdateChunk_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_update_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.update_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.UpdateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1beta.types.Corpus" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Corpus", + "shortName": "update_corpus" + }, + "description": "Sample for UpdateCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_update_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_UpdateCorpus_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_update_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.update_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.UpdateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1beta.types.Corpus" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1beta.types.Corpus", + "shortName": "update_corpus" + }, + "description": "Sample for UpdateCorpus", + "file": "generativelanguage_v1beta_generated_retriever_service_update_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_UpdateCorpus_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_update_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceAsyncClient.update_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.UpdateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1beta.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Document", + 
"shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.RetrieverServiceClient.update_document", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService.UpdateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1beta.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.Document", + "shortName": "update_document" + }, + "description": "Sample for 
UpdateDocument", + "file": "generativelanguage_v1beta_generated_retriever_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_RetrieverService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_retriever_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient.batch_embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.BatchEmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "BatchEmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchEmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "texts", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchEmbedTextResponse", + "shortName": "batch_embed_text" + }, + "description": "Sample for BatchEmbedText", + "file": "generativelanguage_v1beta_generated_text_service_batch_embed_text_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_BatchEmbedText_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_batch_embed_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient.batch_embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.BatchEmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "BatchEmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.BatchEmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "texts", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.BatchEmbedTextResponse", + "shortName": "batch_embed_text" + }, + "description": "Sample for BatchEmbedText", + "file": "generativelanguage_v1beta_generated_text_service_batch_embed_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_BatchEmbedText_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_batch_embed_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient.count_text_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.CountTextTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "CountTextTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CountTextTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.TextPrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CountTextTokensResponse", + "shortName": "count_text_tokens" + }, + "description": "Sample for CountTextTokens", + "file": "generativelanguage_v1beta_generated_text_service_count_text_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_CountTextTokens_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_count_text_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient.count_text_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.CountTextTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "CountTextTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.CountTextTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.TextPrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.CountTextTokensResponse", + "shortName": "count_text_tokens" + }, + "description": "Sample for CountTextTokens", + "file": "generativelanguage_v1beta_generated_text_service_count_text_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_CountTextTokens_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_count_text_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient.embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.EmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "EmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.EmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "text", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.EmbedTextResponse", + "shortName": "embed_text" + }, + "description": "Sample for EmbedText", + "file": "generativelanguage_v1beta_generated_text_service_embed_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_EmbedText_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_embed_text_async.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient.embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.EmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "EmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.EmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "text", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.EmbedTextResponse", + "shortName": "embed_text" + }, + "description": "Sample for EmbedText", + "file": "generativelanguage_v1beta_generated_text_service_embed_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_EmbedText_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_embed_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": 
"google.ai.generativelanguage_v1beta.TextServiceAsyncClient.generate_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.GenerateText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "GenerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.TextPrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "max_output_tokens", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.GenerateTextResponse", + "shortName": "generate_text" + }, + "description": "Sample for GenerateText", + "file": "generativelanguage_v1beta_generated_text_service_generate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_GenerateText_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_generate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ai.generativelanguage_v1beta.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.TextServiceClient.generate_text", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.TextService.GenerateText", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.TextService", + "shortName": "TextService" + }, + "shortName": "GenerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.GenerateTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1beta.types.TextPrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "max_output_tokens", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.GenerateTextResponse", + "shortName": "generate_text" + }, + "description": "Sample for GenerateText", + "file": "generativelanguage_v1beta_generated_text_service_generate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_TextService_GenerateText_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_text_service_generate_text_sync.py" + 
} + ] +} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 0d2d254a89dd..9a1af70477ff 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.3.5" + "version": "0.5.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 6cd937cf3082..11d081685140 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.3.5" + "version": "0.5.0" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py new file mode 100644 index 000000000000..e18ab501752d --- /dev/null +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class generativelanguageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_embed_contents': ('model', 'requests', ), + 'count_tokens': ('model', 'contents', ), + 'embed_content': ('model', 'content', 'task_type', 'title', ), + 'generate_content': ('model', 'contents', 'safety_settings', 'generation_config', ), + 'get_model': ('name', ), + 'list_models': ('page_size', 'page_token', ), + 'stream_generate_content': ('model', 'contents', 'safety_settings', 'generation_config', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=generativelanguageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the generativelanguage client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py new file mode 100644 index 000000000000..a63589d8f655 --- /dev/null +++ 
b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py @@ -0,0 +1,220 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class generativelanguageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_chunks': ('requests', 'parent', ), + 'batch_delete_chunks': ('requests', 'parent', ), + 'batch_embed_contents': ('model', 'requests', ), + 'batch_embed_text': ('model', 'texts', 'requests', ), + 'batch_update_chunks': ('requests', 'parent', ), + 'count_message_tokens': ('model', 'prompt', ), + 'count_text_tokens': ('model', 'prompt', ), + 'count_tokens': ('model', 'contents', ), + 'create_chunk': ('parent', 'chunk', ), + 'create_corpus': ('corpus', ), + 'create_document': ('parent', 'document', ), + 'create_permission': ('parent', 'permission', ), + 'create_tuned_model': ('tuned_model', 'tuned_model_id', 
), + 'delete_chunk': ('name', ), + 'delete_corpus': ('name', 'force', ), + 'delete_document': ('name', 'force', ), + 'delete_permission': ('name', ), + 'delete_tuned_model': ('name', ), + 'embed_content': ('model', 'content', 'task_type', 'title', ), + 'embed_text': ('model', 'text', ), + 'generate_answer': ('model', 'contents', 'answer_style', 'inline_passages', 'semantic_retriever', 'safety_settings', 'temperature', ), + 'generate_content': ('model', 'contents', 'tools', 'safety_settings', 'generation_config', ), + 'generate_message': ('model', 'prompt', 'temperature', 'candidate_count', 'top_p', 'top_k', ), + 'generate_text': ('model', 'prompt', 'temperature', 'candidate_count', 'max_output_tokens', 'top_p', 'top_k', 'safety_settings', 'stop_sequences', ), + 'get_chunk': ('name', ), + 'get_corpus': ('name', ), + 'get_document': ('name', ), + 'get_model': ('name', ), + 'get_permission': ('name', ), + 'get_tuned_model': ('name', ), + 'list_chunks': ('parent', 'page_size', 'page_token', ), + 'list_corpora': ('page_size', 'page_token', ), + 'list_documents': ('parent', 'page_size', 'page_token', ), + 'list_models': ('page_size', 'page_token', ), + 'list_permissions': ('parent', 'page_size', 'page_token', ), + 'list_tuned_models': ('page_size', 'page_token', 'filter', ), + 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), + 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), + 'stream_generate_content': ('model', 'contents', 'tools', 'safety_settings', 'generation_config', ), + 'transfer_ownership': ('name', 'email_address', ), + 'update_chunk': ('chunk', 'update_mask', ), + 'update_corpus': ('corpus', 'update_mask', ), + 'update_document': ('document', 'update_mask', ), + 'update_permission': ('permission', 'update_mask', ), + 'update_tuned_model': ('tuned_model', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params 
= self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=generativelanguageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the generativelanguage client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git 
a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/__init__.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py new file mode 100644 index 000000000000..60c04af4dfa5 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py @@ -0,0 +1,4739 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1.services.generative_service import ( + GenerativeServiceAsyncClient, + GenerativeServiceClient, + transports, +) +from google.ai.generativelanguage_v1.types import content +from google.ai.generativelanguage_v1.types import content as gag_content +from google.ai.generativelanguage_v1.types import generative_service, safety + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GenerativeServiceClient._get_default_mtls_endpoint(None) is None + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GenerativeServiceClient, "grpc"), + (GenerativeServiceAsyncClient, "grpc_asyncio"), + (GenerativeServiceClient, "rest"), + ], +) +def test_generative_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ 
+ (transports.GenerativeServiceGrpcTransport, "grpc"), + (transports.GenerativeServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GenerativeServiceRestTransport, "rest"), + ], +) +def test_generative_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GenerativeServiceClient, "grpc"), + (GenerativeServiceAsyncClient, "grpc_asyncio"), + (GenerativeServiceClient, "rest"), + ], +) +def test_generative_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_generative_service_client_get_transport_class(): + transport = 
GenerativeServiceClient.get_transport_class() + available_transports = [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceRestTransport, + ] + assert transport in available_transports + + transport = GenerativeServiceClient.get_transport_class("grpc") + assert transport == transports.GenerativeServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +def test_generative_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GenerativeServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GenerativeServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + "true", + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + "false", + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + "true", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_generative_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [GenerativeServiceClient, GenerativeServiceAsyncClient] +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +def test_generative_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), + ], +) +def test_generative_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + None, + ), + ], +) +def test_generative_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_generative_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1.services.generative_service.transports.GenerativeServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GenerativeServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_generative_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + response = client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +def test_generate_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + client.generate_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + +@pytest.mark.asyncio +async def test_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + response = await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +@pytest.mark.asyncio +async def test_generate_content_async_from_dict(): + await test_generate_content_async(request_type=dict) + + +def test_generate_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = generative_service.GenerateContentResponse() + client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_generate_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_generate_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_stream_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + response = client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, generative_service.GenerateContentResponse) + + +def test_stream_generate_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + client.stream_generate_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + +@pytest.mark.asyncio +async def test_stream_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + response = await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, generative_service.GenerateContentResponse) + + +@pytest.mark.asyncio +async def test_stream_generate_content_async_from_dict(): + await test_stream_generate_content_async(request_type=dict) + + +def test_stream_generate_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = iter([generative_service.GenerateContentResponse()]) + client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stream_generate_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_stream_generate_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.stream_generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_stream_generate_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_stream_generate_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.stream_generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_stream_generate_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.EmbedContentRequest, + dict, + ], +) +def test_embed_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + response = client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.EmbedContentResponse) + + +def test_embed_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + client.embed_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest() + + +@pytest.mark.asyncio +async def test_embed_content_async( + transport: str = "grpc_asyncio", request_type=generative_service.EmbedContentRequest +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + response = await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.EmbedContentResponse) + + +@pytest.mark.asyncio +async def test_embed_content_async_from_dict(): + await test_embed_content_async(request_type=dict) + + +def test_embed_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.EmbedContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = generative_service.EmbedContentResponse() + client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_embed_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.EmbedContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_embed_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.embed_content( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].content + mock_val = gag_content.Content(parts=[gag_content.Part(text="text_value")]) + assert arg == mock_val + + +def test_embed_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +@pytest.mark.asyncio +async def test_embed_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.embed_content( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].content + mock_val = gag_content.Content(parts=[gag_content.Part(text="text_value")]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_embed_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BatchEmbedContentsRequest, + dict, + ], +) +def test_batch_embed_contents(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + response = client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +def test_batch_embed_contents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + client.batch_embed_contents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest() + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async( + transport: str = "grpc_asyncio", + request_type=generative_service.BatchEmbedContentsRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + response = await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async_from_dict(): + await test_batch_embed_contents_async(request_type=dict) + + +def test_batch_embed_contents_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = generative_service.BatchEmbedContentsRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = generative_service.BatchEmbedContentsResponse() + client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_embed_contents_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.BatchEmbedContentsRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_batch_embed_contents_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_embed_contents( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [generative_service.EmbedContentRequest(model="model_value")] + assert arg == mock_val + + +def test_batch_embed_contents_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +@pytest.mark.asyncio +async def test_batch_embed_contents_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.batch_embed_contents( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [generative_service.EmbedContentRequest(model="model_value")] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_embed_contents_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.CountTokensRequest, + dict, + ], +) +def test_count_tokens(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse( + total_tokens=1303, + ) + response = client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + + +def test_count_tokens_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + client.count_tokens() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest() + + +@pytest.mark.asyncio +async def test_count_tokens_async( + transport: str = "grpc_asyncio", request_type=generative_service.CountTokensRequest +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse( + total_tokens=1303, + ) + ) + response = await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + + +@pytest.mark.asyncio +async def test_count_tokens_async_from_dict(): + await test_count_tokens_async(request_type=dict) + + +def test_count_tokens_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.CountTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = generative_service.CountTokensResponse() + client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_tokens_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.CountTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse() + ) + await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_tokens_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_tokens( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_count_tokens_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_count_tokens_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_tokens( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_tokens_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_generate_content_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +def test_generate_content_rest_required_fields( + request_type=generative_service.GenerateContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_content_rest_interceptors(null_interceptor): + transport = 
transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_generate_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_generate_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.GenerateContentRequest.pb( + generative_service.GenerateContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.GenerateContentResponse.to_json( + generative_service.GenerateContentResponse() + ) + + request = generative_service.GenerateContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateContentResponse() + + client.generate_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_content_rest_bad_request( + transport: str = "rest", request_type=generative_service.GenerateContentRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_content(request) + + +def test_generate_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{model=models/*}:generateContent" % client.transport._host, args[1] + ) + + +def test_generate_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_generate_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_stream_generate_content_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.GenerateContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.stream_generate_content(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +def test_stream_generate_content_rest_required_fields( + request_type=generative_service.GenerateContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stream_generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stream_generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are 
left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.stream_generate_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stream_generate_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stream_generate_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stream_generate_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_stream_generate_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_stream_generate_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.GenerateContentRequest.pb( + generative_service.GenerateContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.GenerateContentResponse.to_json( + generative_service.GenerateContentResponse() + ) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = generative_service.GenerateContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateContentResponse() + + client.stream_generate_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + + +def test_stream_generate_content_rest_bad_request( + transport: str = "rest", request_type=generative_service.GenerateContentRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stream_generate_content(request) + + +def test_stream_generate_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.GenerateContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.stream_generate_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{model=models/*}:streamGenerateContent" % client.transport._host, + args[1], + ) + + +def test_stream_generate_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_stream_generate_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.EmbedContentRequest, + dict, + ], +) +def test_embed_content_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.embed_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.EmbedContentResponse) + + +def test_embed_content_rest_required_fields( + request_type=generative_service.EmbedContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.embed_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_embed_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.embed_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "content", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_embed_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_embed_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, 
"pre_embed_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.EmbedContentRequest.pb( + generative_service.EmbedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.EmbedContentResponse.to_json( + generative_service.EmbedContentResponse() + ) + + request = generative_service.EmbedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.EmbedContentResponse() + + client.embed_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_embed_content_rest_bad_request( + transport: str = "rest", request_type=generative_service.EmbedContentRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.embed_content(request) + + +def test_embed_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.embed_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{model=models/*}:embedContent" % client.transport._host, args[1] + ) + + +def test_embed_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +def test_embed_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BatchEmbedContentsRequest, + dict, + ], +) +def test_batch_embed_contents_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_embed_contents(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +def test_batch_embed_contents_rest_required_fields( + request_type=generative_service.BatchEmbedContentsRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_embed_contents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_embed_contents_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_embed_contents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_embed_contents_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_batch_embed_contents" + ) as post, mock.patch.object( + 
transports.GenerativeServiceRestInterceptor, "pre_batch_embed_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.BatchEmbedContentsRequest.pb( + generative_service.BatchEmbedContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + generative_service.BatchEmbedContentsResponse.to_json( + generative_service.BatchEmbedContentsResponse() + ) + ) + + request = generative_service.BatchEmbedContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.BatchEmbedContentsResponse() + + client.batch_embed_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_embed_contents_rest_bad_request( + transport: str = "rest", request_type=generative_service.BatchEmbedContentsRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_embed_contents(request) + + +def test_batch_embed_contents_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_embed_contents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{model=models/*}:batchEmbedContents" % client.transport._host, + args[1], + ) + + +def test_batch_embed_contents_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +def test_batch_embed_contents_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.CountTokensRequest, + dict, + ], +) +def test_count_tokens_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.CountTokensResponse( + total_tokens=1303, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.count_tokens(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + + +def test_count_tokens_rest_required_fields( + request_type=generative_service.CountTokensRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.count_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_tokens_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_tokens._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_tokens_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_count_tokens" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_count_tokens" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.CountTokensRequest.pb( + generative_service.CountTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.CountTokensResponse.to_json( + generative_service.CountTokensResponse() + ) + + request = generative_service.CountTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.CountTokensResponse() + + client.count_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_count_tokens_rest_bad_request( + transport: str = "rest", request_type=generative_service.CountTokensRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.count_tokens(request) + + +def test_count_tokens_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.count_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{model=models/*}:countTokens" % client.transport._host, args[1] + ) + + +def test_count_tokens_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_count_tokens_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GenerativeServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GenerativeServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + transports.GenerativeServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = GenerativeServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GenerativeServiceGrpcTransport, + ) + + +def test_generative_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GenerativeServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_generative_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1.services.generative_service.transports.GenerativeServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GenerativeServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "generate_content", + "stream_generate_content", + "embed_content", + "batch_embed_contents", + "count_tokens", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_generative_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1.services.generative_service.transports.GenerativeServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GenerativeServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_generative_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1.services.generative_service.transports.GenerativeServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GenerativeServiceTransport() + adc.assert_called_once() + + +def test_generative_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GenerativeServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + transports.GenerativeServiceRestTransport, + ], +) +def test_generative_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GenerativeServiceGrpcTransport, grpc_helpers), + (transports.GenerativeServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def 
test_generative_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_generative_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GenerativeServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_generative_service_host_no_port(transport_name): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_generative_service_host_with_port(transport_name): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_generative_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GenerativeServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GenerativeServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_content._session + session2 = client2.transport.generate_content._session + assert session1 != session2 + session1 = client1.transport.stream_generate_content._session + session2 = client2.transport.stream_generate_content._session + assert session1 != session2 + session1 = client1.transport.embed_content._session + session2 = client2.transport.embed_content._session + assert session1 != session2 + session1 = client1.transport.batch_embed_contents._session + session2 = client2.transport.batch_embed_contents._session + assert session1 != session2 + session1 = client1.transport.count_tokens._session + session2 = client2.transport.count_tokens._session + assert session1 != session2 + + +def test_generative_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GenerativeServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_generative_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.GenerativeServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = GenerativeServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = GenerativeServiceClient.model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GenerativeServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = GenerativeServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GenerativeServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = GenerativeServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GenerativeServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = GenerativeServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = GenerativeServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = GenerativeServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GenerativeServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = GenerativeServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GenerativeServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GenerativeServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GenerativeServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "operations"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py new file mode 100644 index 000000000000..115f2efdd589 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py @@ -0,0 +1,3204 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1.services.model_service import ( + ModelServiceAsyncClient, + ModelServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1.types import model, model_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ModelServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ModelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ModelServiceGrpcTransport, "grpc"), + 
(transports.ModelServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_model_service_client_get_transport_class(): + transport = ModelServiceClient.get_transport_class() + available_transports = [ + 
transports.ModelServiceGrpcTransport, + transports.ModelServiceRestTransport, + ] + assert transport in available_transports + + transport = ModelServiceClient.get_transport_class("grpc") + assert transport == transports.ModelServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc", "true"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ModelServiceClient, 
transports.ModelServiceGrpcTransport, "grpc", "false"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "true"), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_model_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ModelServiceClient, ModelServiceAsyncClient]) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", None), + ], +) +def test_model_service_client_client_options_credentials_file( + 
client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_model_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1.services.model_service.transports.ModelServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ModelServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_model_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + response = client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_model), "__call__") as call: + client.get_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + +@pytest.mark.asyncio +async def test_get_model_async( + transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + ) + response = await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +@pytest.mark.asyncio +async def test_get_model_async_from_dict(): + await test_get_model_async(request_type=dict) + + +def test_get_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = model.Model() + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) + await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + client.list_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + +@pytest.mark.asyncio +async def test_list_models_async( + transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_models_async_from_dict(): + await test_list_models_async(request_type=dict) + + +def test_list_models_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +def test_list_models_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_models_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = model_service.ListModelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_models_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_pager(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_models(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + +def test_list_models_pages(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + pages = list(client.list_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_models_async_pager(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, model.Model) for i in responses) + + +@pytest.mark.asyncio +async def test_list_models_async_pages(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_models(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_model(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_rest_required_fields(request_type=model_service.GetModelRequest): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = model.Model() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_get_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = model.Model.to_json(model.Model()) + + request = model_service.GetModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model.Model() + + client.get_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_model_rest_bad_request( + transport: str = "rest", request_type=model_service.GetModelRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_model(request) + + +def test_get_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model.Model() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=models/*}" % client.transport._host, args[1] + ) + + +def test_get_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +def test_get_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_models(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_models_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_list_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.ListModelsRequest.pb( + model_service.ListModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
model_service.ListModelsResponse.to_json( + model_service.ListModelsResponse() + ) + + request = model_service.ListModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model_service.ListModelsResponse() + + client.list_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_models_rest_bad_request( + transport: str = "rest", request_type=model_service.ListModelsRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_models(request) + + +def test_list_models_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model_service.ListModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + page_size=951, + page_token="page_token_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/models" % client.transport._host, args[1]) + + +def test_list_models_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_rest_pager(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(model_service.ListModelsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + pages = list(client.list_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ModelServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ModelServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ModelServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ModelServiceGrpcTransport, + ) + + +def test_model_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_model_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.ai.generativelanguage_v1.services.model_service.transports.ModelServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_model", + "list_models", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_model_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_model_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport() + adc.assert_called_once() + + +def test_model_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ModelServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + ], +) +def test_model_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_model_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ModelServiceGrpcTransport, grpc_helpers), + (transports.ModelServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_model_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_model_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ModelServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_model_service_host_no_port(transport_name): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_model_service_host_with_port(transport_name): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" 
+ ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_model_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ModelServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ModelServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_model._session + session2 = client2.transport.get_model._session + assert session1 != session2 + session1 = client1.transport.list_models._session + session2 = client2.transport.list_models._session + assert session1 != session2 + + +def test_model_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ModelServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_model_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ModelServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = ModelServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = ModelServiceClient.model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ModelServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ModelServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ModelServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ModelServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ModelServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ModelServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ModelServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ModelServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ModelServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ModelServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ModelServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "operations"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport), + (ModelServiceAsyncClient, transports.ModelServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py new file mode 100644 index 000000000000..b9d8d3c8c612 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py @@ -0,0 +1,2553 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.discuss_service import ( + DiscussServiceAsyncClient, + DiscussServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta.types import citation, discuss_service, safety + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DiscussServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DiscussServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DiscussServiceClient, "grpc"), + (DiscussServiceAsyncClient, "grpc_asyncio"), + (DiscussServiceClient, "rest"), + ], +) +def test_discuss_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.DiscussServiceGrpcTransport, "grpc"), + (transports.DiscussServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DiscussServiceRestTransport, "rest"), + ], +) +def test_discuss_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DiscussServiceClient, "grpc"), + (DiscussServiceAsyncClient, "grpc_asyncio"), + (DiscussServiceClient, "rest"), + ], +) +def test_discuss_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_discuss_service_client_get_transport_class(): + transport = 
DiscussServiceClient.get_transport_class() + available_transports = [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceRestTransport, + ] + assert transport in available_transports + + transport = DiscussServiceClient.get_transport_class("grpc") + assert transport == transports.DiscussServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DiscussServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceAsyncClient), +) +def test_discuss_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DiscussServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DiscussServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc", "true"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DiscussServiceClient, 
transports.DiscussServiceGrpcTransport, "grpc", "false"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", "true"), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + DiscussServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_discuss_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DiscussServiceClient, DiscussServiceAsyncClient] +) +@mock.patch.object( + DiscussServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceAsyncClient), +) +def test_discuss_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), + ], +) +def test_discuss_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DiscussServiceClient, + transports.DiscussServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", None), + ], +) +def test_discuss_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_discuss_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.discuss_service.transports.DiscussServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DiscussServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DiscussServiceClient, + transports.DiscussServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_discuss_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.GenerateMessageRequest, + dict, + ], +) +def test_generate_message(request_type, transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + response = client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +def test_generate_message_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + client.generate_message() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest() + + +@pytest.mark.asyncio +async def test_generate_message_async( + transport: str = "grpc_asyncio", request_type=discuss_service.GenerateMessageRequest +): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + response = await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +@pytest.mark.asyncio +async def test_generate_message_async_from_dict(): + await test_generate_message_async(request_type=dict) + + +def test_generate_message_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.GenerateMessageRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = discuss_service.GenerateMessageResponse() + client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_message_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.GenerateMessageRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_message_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_message( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +def test_generate_message_flattened_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.asyncio +async def test_generate_message_flattened_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.generate_message( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_message_flattened_error_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.CountMessageTokensRequest, + dict, + ], +) +def test_count_message_tokens(request_type, transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + response = client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +def test_count_message_tokens_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + client.count_message_tokens() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest() + + +@pytest.mark.asyncio +async def test_count_message_tokens_async( + transport: str = "grpc_asyncio", + request_type=discuss_service.CountMessageTokensRequest, +): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + ) + response = await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.asyncio +async def test_count_message_tokens_async_from_dict(): + await test_count_message_tokens_async(request_type=dict) + + +def test_count_message_tokens_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.CountMessageTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = discuss_service.CountMessageTokensResponse() + client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_message_tokens_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = discuss_service.CountMessageTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse() + ) + await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_message_tokens_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_message_tokens( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + + +def test_count_message_tokens_flattened_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +@pytest.mark.asyncio +async def test_count_message_tokens_flattened_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_message_tokens( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_message_tokens_flattened_error_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.GenerateMessageRequest, + dict, + ], +) +def test_generate_message_rest(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_message(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, discuss_service.GenerateMessageResponse) + + +def test_generate_message_rest_required_fields( + request_type=discuss_service.GenerateMessageRequest, +): + transport_class = transports.DiscussServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_message(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_message_rest_unset_required_fields(): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_message._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_message_rest_interceptors(null_interceptor): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DiscussServiceRestInterceptor(), + ) + client = DiscussServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message" + ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, 
"pre_generate_message" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = discuss_service.GenerateMessageRequest.pb( + discuss_service.GenerateMessageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = discuss_service.GenerateMessageResponse.to_json( + discuss_service.GenerateMessageResponse() + ) + + request = discuss_service.GenerateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discuss_service.GenerateMessageResponse() + + client.generate_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_message_rest_bad_request( + transport: str = "rest", request_type=discuss_service.GenerateMessageRequest +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_message(request) + + +def test_generate_message_rest_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_message(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:generateMessage" % client.transport._host, + args[1], + ) + + +def test_generate_message_rest_flattened_error(transport: str = "rest"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +def test_generate_message_rest_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.CountMessageTokensRequest, + dict, + ], +) +def test_count_message_tokens_rest(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.count_message_tokens(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +def test_count_message_tokens_rest_required_fields( + request_type=discuss_service.CountMessageTokensRequest, +): + transport_class = transports.DiscussServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_message_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_message_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.count_message_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_message_tokens_rest_unset_required_fields(): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_message_tokens._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_message_tokens_rest_interceptors(null_interceptor): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DiscussServiceRestInterceptor(), + ) + client = DiscussServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_count_message_tokens" + ) as post, mock.patch.object( + 
transports.DiscussServiceRestInterceptor, "pre_count_message_tokens" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = discuss_service.CountMessageTokensRequest.pb( + discuss_service.CountMessageTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = discuss_service.CountMessageTokensResponse.to_json( + discuss_service.CountMessageTokensResponse() + ) + + request = discuss_service.CountMessageTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discuss_service.CountMessageTokensResponse() + + client.count_message_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_count_message_tokens_rest_bad_request( + transport: str = "rest", request_type=discuss_service.CountMessageTokensRequest +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.count_message_tokens(request) + + +def test_count_message_tokens_rest_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.count_message_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:countMessageTokens" % client.transport._host, + args[1], + ) + + +def test_count_message_tokens_rest_flattened_error(transport: str = "rest"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +def test_count_message_tokens_rest_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DiscussServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DiscussServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + transports.DiscussServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DiscussServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DiscussServiceGrpcTransport, + ) + + +def test_discuss_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DiscussServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_discuss_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.discuss_service.transports.DiscussServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DiscussServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "generate_message", + "count_message_tokens", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_discuss_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.discuss_service.transports.DiscussServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiscussServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_discuss_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.discuss_service.transports.DiscussServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiscussServiceTransport() + adc.assert_called_once() + + +def test_discuss_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DiscussServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + transports.DiscussServiceRestTransport, + ], +) +def test_discuss_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DiscussServiceGrpcTransport, grpc_helpers), + (transports.DiscussServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_discuss_service_transport_create_channel(transport_class, grpc_helpers): + # If 
credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_discuss_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DiscussServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_discuss_service_host_no_port(transport_name): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_discuss_service_host_with_port(transport_name): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_discuss_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DiscussServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DiscussServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_message._session + session2 = client2.transport.generate_message._session + assert session1 != session2 + session1 = client1.transport.count_message_tokens._session + session2 = client2.transport.count_message_tokens._session + assert session1 != session2 + + +def test_discuss_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DiscussServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_discuss_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DiscussServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = DiscussServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = DiscussServiceClient.model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiscussServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DiscussServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = DiscussServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DiscussServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DiscussServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DiscussServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DiscussServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiscussServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = DiscussServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DiscussServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DiscussServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = DiscussServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiscussServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DiscussServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DiscussServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DiscussServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DiscussServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport), + (DiscussServiceAsyncClient, transports.DiscussServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py new file mode 100644 index 000000000000..ebf8f86f1b02 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py @@ -0,0 +1,4722 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.generative_service import ( + GenerativeServiceAsyncClient, + GenerativeServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta.types import ( + generative_service, + retriever, + safety, +) +from google.ai.generativelanguage_v1beta.types import content +from google.ai.generativelanguage_v1beta.types import content as gag_content + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint 
is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GenerativeServiceClient._get_default_mtls_endpoint(None) is None + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GenerativeServiceClient, "grpc"), + (GenerativeServiceAsyncClient, "grpc_asyncio"), + (GenerativeServiceClient, "rest"), + ], +) +def test_generative_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
"generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GenerativeServiceGrpcTransport, "grpc"), + (transports.GenerativeServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GenerativeServiceRestTransport, "rest"), + ], +) +def test_generative_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GenerativeServiceClient, "grpc"), + (GenerativeServiceAsyncClient, "grpc_asyncio"), + (GenerativeServiceClient, "rest"), + ], +) +def test_generative_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
"generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_generative_service_client_get_transport_class(): + transport = GenerativeServiceClient.get_transport_class() + available_transports = [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceRestTransport, + ] + assert transport in available_transports + + transport = GenerativeServiceClient.get_transport_class("grpc") + assert transport == transports.GenerativeServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +def test_generative_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GenerativeServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GenerativeServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + "true", + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + "false", + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + "true", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_generative_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [GenerativeServiceClient, GenerativeServiceAsyncClient] +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +def test_generative_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), + ], +) +def test_generative_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + None, + ), + ], +) +def test_generative_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_generative_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.generative_service.transports.GenerativeServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GenerativeServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_generative_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + response = client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +def test_generate_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + client.generate_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + +@pytest.mark.asyncio +async def test_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + response = await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +@pytest.mark.asyncio +async def test_generate_content_async_from_dict(): + await test_generate_content_async(request_type=dict) + + +def test_generate_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = generative_service.GenerateContentResponse() + client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_generate_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_generate_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateAnswerRequest, + dict, + ], +) +def test_generate_answer(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + response = client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateAnswerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateAnswerResponse) + assert math.isclose(response.answerable_probability, 0.234, rel_tol=1e-6) + + +def test_generate_answer_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + client.generate_answer() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateAnswerRequest() + + +@pytest.mark.asyncio +async def test_generate_answer_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateAnswerRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + ) + response = await client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateAnswerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateAnswerResponse) + assert math.isclose(response.answerable_probability, 0.234, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_generate_answer_async_from_dict(): + await test_generate_answer_async(request_type=dict) + + +def test_generate_answer_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = generative_service.GenerateAnswerRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + call.return_value = generative_service.GenerateAnswerResponse() + client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_answer_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateAnswerRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse() + ) + await client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_answer_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateAnswerResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_answer( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + arg = args[0].safety_settings + mock_val = [ + safety.SafetySetting(category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY) + ] + assert arg == mock_val + arg = args[0].answer_style + mock_val = generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE + assert arg == mock_val + + +def test_generate_answer_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_answer( + generative_service.GenerateAnswerRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + +@pytest.mark.asyncio +async def test_generate_answer_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateAnswerResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_answer( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + arg = args[0].safety_settings + mock_val = [ + safety.SafetySetting(category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY) + ] + assert arg == mock_val + arg = args[0].answer_style + mock_val = generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_answer_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_answer( + generative_service.GenerateAnswerRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_stream_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iter([generative_service.GenerateContentResponse()]) + response = client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, generative_service.GenerateContentResponse) + + +def test_stream_generate_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + client.stream_generate_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + +@pytest.mark.asyncio +async def test_stream_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + response = await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest() + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, generative_service.GenerateContentResponse) + + +@pytest.mark.asyncio +async def test_stream_generate_content_async_from_dict(): + await test_stream_generate_content_async(request_type=dict) + + +def test_stream_generate_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = iter([generative_service.GenerateContentResponse()]) + client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stream_generate_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_stream_generate_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.stream_generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_stream_generate_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_stream_generate_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.stream_generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_stream_generate_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.EmbedContentRequest, + dict, + ], +) +def test_embed_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + response = client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.EmbedContentResponse) + + +def test_embed_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + client.embed_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest() + + +@pytest.mark.asyncio +async def test_embed_content_async( + transport: str = "grpc_asyncio", request_type=generative_service.EmbedContentRequest +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + response = await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.EmbedContentResponse) + + +@pytest.mark.asyncio +async def test_embed_content_async_from_dict(): + await test_embed_content_async(request_type=dict) + + +def test_embed_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.EmbedContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = generative_service.EmbedContentResponse() + client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_embed_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.EmbedContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_embed_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.embed_content( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].content + mock_val = gag_content.Content(parts=[gag_content.Part(text="text_value")]) + assert arg == mock_val + + +def test_embed_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +@pytest.mark.asyncio +async def test_embed_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.embed_content( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].content + mock_val = gag_content.Content(parts=[gag_content.Part(text="text_value")]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_embed_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BatchEmbedContentsRequest, + dict, + ], +) +def test_batch_embed_contents(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + response = client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +def test_batch_embed_contents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + client.batch_embed_contents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest() + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async( + transport: str = "grpc_asyncio", + request_type=generative_service.BatchEmbedContentsRequest, +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + response = await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async_from_dict(): + await test_batch_embed_contents_async(request_type=dict) + + +def test_batch_embed_contents_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.BatchEmbedContentsRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = generative_service.BatchEmbedContentsResponse() + client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_embed_contents_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = generative_service.BatchEmbedContentsRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_batch_embed_contents_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_embed_contents( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [generative_service.EmbedContentRequest(model="model_value")] + assert arg == mock_val + + +def test_batch_embed_contents_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +@pytest.mark.asyncio +async def test_batch_embed_contents_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_embed_contents( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [generative_service.EmbedContentRequest(model="model_value")] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_embed_contents_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.CountTokensRequest, + dict, + ], +) +def test_count_tokens(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse( + total_tokens=1303, + ) + response = client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + + +def test_count_tokens_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + client.count_tokens() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest() + + +@pytest.mark.asyncio +async def test_count_tokens_async( + transport: str = "grpc_asyncio", request_type=generative_service.CountTokensRequest +): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse( + total_tokens=1303, + ) + ) + response = await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + + +@pytest.mark.asyncio +async def test_count_tokens_async_from_dict(): + await test_count_tokens_async(request_type=dict) + + +def test_count_tokens_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.CountTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = generative_service.CountTokensResponse() + client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_tokens_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.CountTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse() + ) + await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_tokens_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_tokens( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_count_tokens_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_count_tokens_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_tokens( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_tokens_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_generate_content_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +def test_generate_content_rest_required_fields( + request_type=generative_service.GenerateContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_content_rest_interceptors(null_interceptor): + transport = 
transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_generate_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_generate_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.GenerateContentRequest.pb( + generative_service.GenerateContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.GenerateContentResponse.to_json( + generative_service.GenerateContentResponse() + ) + + request = generative_service.GenerateContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateContentResponse() + + client.generate_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_content_rest_bad_request( + transport: str = "rest", request_type=generative_service.GenerateContentRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_content(request) + + +def test_generate_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:generateContent" % client.transport._host, + args[1], + ) + + +def test_generate_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_generate_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateAnswerRequest, + dict, + ], +) +def test_generate_answer_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateAnswerResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_answer(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateAnswerResponse) + assert math.isclose(response.answerable_probability, 0.234, rel_tol=1e-6) + + +def test_generate_answer_rest_required_fields( + request_type=generative_service.GenerateAnswerRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_answer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_answer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateAnswerResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateAnswerResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_answer(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_answer_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_answer._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + "answerStyle", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_answer_rest_interceptors(null_interceptor): + transport = 
transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_generate_answer" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_generate_answer" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.GenerateAnswerRequest.pb( + generative_service.GenerateAnswerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.GenerateAnswerResponse.to_json( + generative_service.GenerateAnswerResponse() + ) + + request = generative_service.GenerateAnswerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateAnswerResponse() + + client.generate_answer( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_answer_rest_bad_request( + transport: str = "rest", request_type=generative_service.GenerateAnswerRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_answer(request) + + +def test_generate_answer_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateAnswerResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateAnswerResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_answer(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:generateAnswer" % client.transport._host, + args[1], + ) + + +def test_generate_answer_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_answer( + generative_service.GenerateAnswerRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + +def test_generate_answer_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_stream_generate_content_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.GenerateContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.stream_generate_content(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + + +def test_stream_generate_content_rest_required_fields( + request_type=generative_service.GenerateContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stream_generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stream_generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are 
left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.stream_generate_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stream_generate_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stream_generate_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stream_generate_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_stream_generate_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_stream_generate_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.GenerateContentRequest.pb( + generative_service.GenerateContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.GenerateContentResponse.to_json( + generative_service.GenerateContentResponse() + ) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = generative_service.GenerateContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateContentResponse() + + client.stream_generate_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + + +def test_stream_generate_content_rest_bad_request( + transport: str = "rest", request_type=generative_service.GenerateContentRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stream_generate_content(request) + + +def test_stream_generate_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.GenerateContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.stream_generate_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:streamGenerateContent" % client.transport._host, + args[1], + ) + + +def test_stream_generate_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_stream_generate_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.EmbedContentRequest, + dict, + ], +) +def test_embed_content_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.embed_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.EmbedContentResponse) + + +def test_embed_content_rest_required_fields( + request_type=generative_service.EmbedContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.embed_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_embed_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.embed_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "content", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_embed_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_embed_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, 
"pre_embed_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.EmbedContentRequest.pb( + generative_service.EmbedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.EmbedContentResponse.to_json( + generative_service.EmbedContentResponse() + ) + + request = generative_service.EmbedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.EmbedContentResponse() + + client.embed_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_embed_content_rest_bad_request( + transport: str = "rest", request_type=generative_service.EmbedContentRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.embed_content(request) + + +def test_embed_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.embed_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:embedContent" % client.transport._host, args[1] + ) + + +def test_embed_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +def test_embed_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BatchEmbedContentsRequest, + dict, + ], +) +def test_batch_embed_contents_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_embed_contents(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +def test_batch_embed_contents_rest_required_fields( + request_type=generative_service.BatchEmbedContentsRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_embed_contents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_embed_contents_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_embed_contents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_embed_contents_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_batch_embed_contents" + ) as post, mock.patch.object( + 
transports.GenerativeServiceRestInterceptor, "pre_batch_embed_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.BatchEmbedContentsRequest.pb( + generative_service.BatchEmbedContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + generative_service.BatchEmbedContentsResponse.to_json( + generative_service.BatchEmbedContentsResponse() + ) + ) + + request = generative_service.BatchEmbedContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.BatchEmbedContentsResponse() + + client.batch_embed_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_embed_contents_rest_bad_request( + transport: str = "rest", request_type=generative_service.BatchEmbedContentsRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_embed_contents(request) + + +def test_batch_embed_contents_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_embed_contents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:batchEmbedContents" % client.transport._host, + args[1], + ) + + +def test_batch_embed_contents_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +def test_batch_embed_contents_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.CountTokensRequest, + dict, + ], +) +def test_count_tokens_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.CountTokensResponse( + total_tokens=1303, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.count_tokens(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + + +def test_count_tokens_rest_required_fields( + request_type=generative_service.CountTokensRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.count_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_tokens_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_tokens._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_tokens_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_count_tokens" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_count_tokens" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = generative_service.CountTokensRequest.pb( + generative_service.CountTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = generative_service.CountTokensResponse.to_json( + generative_service.CountTokensResponse() + ) + + request = generative_service.CountTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.CountTokensResponse() + + client.count_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_count_tokens_rest_bad_request( + transport: str = "rest", request_type=generative_service.CountTokensRequest +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.count_tokens(request) + + +def test_count_tokens_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.count_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:countTokens" % client.transport._host, args[1] + ) + + +def test_count_tokens_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_count_tokens_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GenerativeServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GenerativeServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + transports.GenerativeServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = GenerativeServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GenerativeServiceGrpcTransport, + ) + + +def test_generative_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GenerativeServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_generative_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.generative_service.transports.GenerativeServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GenerativeServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "generate_content", + "generate_answer", + "stream_generate_content", + "embed_content", + "batch_embed_contents", + "count_tokens", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_generative_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.generative_service.transports.GenerativeServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GenerativeServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_generative_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.generative_service.transports.GenerativeServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GenerativeServiceTransport() + adc.assert_called_once() + + +def test_generative_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GenerativeServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + transports.GenerativeServiceRestTransport, + ], +) +def test_generative_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GenerativeServiceGrpcTransport, grpc_helpers), + (transports.GenerativeServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def 
test_generative_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_generative_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GenerativeServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_generative_service_host_no_port(transport_name): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_generative_service_host_with_port(transport_name): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_generative_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GenerativeServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GenerativeServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_content._session + session2 = client2.transport.generate_content._session + assert session1 != session2 + session1 = client1.transport.generate_answer._session + session2 = client2.transport.generate_answer._session + assert session1 != session2 + session1 = client1.transport.stream_generate_content._session + session2 = client2.transport.stream_generate_content._session + assert session1 != session2 + session1 = client1.transport.embed_content._session + session2 = client2.transport.embed_content._session + assert session1 != session2 + session1 = client1.transport.batch_embed_contents._session + session2 = client2.transport.batch_embed_contents._session + assert session1 != session2 + session1 = client1.transport.count_tokens._session + session2 = client2.transport.count_tokens._session + assert session1 != session2 + + +def test_generative_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.GenerativeServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_generative_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GenerativeServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = GenerativeServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): 
+ expected = { + "model": "clam", + } + path = GenerativeServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GenerativeServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = GenerativeServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GenerativeServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = GenerativeServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GenerativeServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = GenerativeServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = GenerativeServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = GenerativeServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GenerativeServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = GenerativeServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GenerativeServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GenerativeServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GenerativeServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = GenerativeServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py new file mode 100644 index 000000000000..81074d597f5c --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -0,0 +1,5648 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.model_service import ( + ModelServiceAsyncClient, + ModelServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1beta.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1beta.types import model, model_service +from 
google.ai.generativelanguage_v1beta.types import tuned_model + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ModelServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ModelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert 
isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ModelServiceGrpcTransport, "grpc"), + (transports.ModelServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert 
client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_model_service_client_get_transport_class(): + transport = ModelServiceClient.get_transport_class() + available_transports = [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceRestTransport, + ] + assert transport in available_transports + + transport = ModelServiceClient.get_transport_class("grpc") + assert transport == transports.ModelServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc", "true"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ModelServiceClient, 
transports.ModelServiceGrpcTransport, "grpc", "false"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "true"), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_model_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ModelServiceClient, ModelServiceAsyncClient]) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", None), + ], +) +def test_model_service_client_client_options_credentials_file( + 
client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_model_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.model_service.transports.ModelServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ModelServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_model_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + response = client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_model), "__call__") as call: + client.get_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + +@pytest.mark.asyncio +async def test_get_model_async( + transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + ) + response = await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +@pytest.mark.asyncio +async def test_get_model_async_from_dict(): + await test_get_model_async(request_type=dict) + + +def test_get_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = model.Model() + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) + await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + client.list_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + +@pytest.mark.asyncio +async def test_list_models_async( + transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_models_async_from_dict(): + await test_list_models_async(request_type=dict) + + +def test_list_models_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +def test_list_models_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_models_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = model_service.ListModelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_models_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_pager(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_models(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + +def test_list_models_pages(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + pages = list(client.list_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_models_async_pager(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, model.Model) for i in responses) + + +@pytest.mark.asyncio +async def test_list_models_async_pages(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_models(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetTunedModelRequest, + dict, + ], +) +def test_get_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + base_model="base_model_value", + ) + response = client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetTunedModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == tuned_model.TunedModel.State.CREATING + + +def test_get_tuned_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + client.get_tuned_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetTunedModelRequest() + + +@pytest.mark.asyncio +async def test_get_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.GetTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + ) + ) + response = await client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetTunedModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == tuned_model.TunedModel.State.CREATING + + +@pytest.mark.asyncio +async def test_get_tuned_model_async_from_dict(): + await test_get_tuned_model_async(request_type=dict) + + +def test_get_tuned_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + call.return_value = tuned_model.TunedModel() + client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_tuned_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel() + ) + await client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tuned_model.TunedModel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tuned_model( + model_service.GetTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tuned_model.TunedModel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_tuned_model( + model_service.GetTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListTunedModelsRequest, + dict, + ], +) +def test_list_tuned_models(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListTunedModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunedModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tuned_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + client.list_tuned_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListTunedModelsRequest() + + +@pytest.mark.asyncio +async def test_list_tuned_models_async( + transport: str = "grpc_asyncio", request_type=model_service.ListTunedModelsRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListTunedModelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunedModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_from_dict(): + await test_list_tuned_models_async(request_type=dict) + + +def test_list_tuned_models_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = model_service.ListTunedModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tuned_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +def test_list_tuned_models_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tuned_models( + model_service.ListTunedModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_tuned_models_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListTunedModelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListTunedModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tuned_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_tuned_models_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tuned_models( + model_service.ListTunedModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_tuned_models_pager(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_tuned_models(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuned_model.TunedModel) for i in results) + + +def test_list_tuned_models_pages(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tuned_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_pager(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tuned_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tuned_model.TunedModel) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_pages(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tuned_models(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.CreateTunedModelRequest, + dict, + ], +) +def test_create_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.CreateTunedModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_tuned_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + client.create_tuned_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.CreateTunedModelRequest() + + +@pytest.mark.asyncio +async def test_create_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.CreateTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.CreateTunedModelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_tuned_model_async_from_dict(): + await test_create_tuned_model_async(request_type=dict) + + +def test_create_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].tuned_model_id + mock_val = "tuned_model_id_value" + assert arg == mock_val + + +def test_create_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_tuned_model( + model_service.CreateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].tuned_model_id + mock_val = "tuned_model_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_tuned_model( + model_service.CreateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.UpdateTunedModelRequest, + dict, + ], +) +def test_update_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + base_model="base_model_value", + ) + response = client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.UpdateTunedModelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == gag_tuned_model.TunedModel.State.CREATING + + +def test_update_tuned_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + client.update_tuned_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.UpdateTunedModelRequest() + + +@pytest.mark.asyncio +async def test_update_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.UpdateTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + ) + ) + response = await client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.UpdateTunedModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == gag_tuned_model.TunedModel.State.CREATING + + +@pytest.mark.asyncio +async def test_update_tuned_model_async_from_dict(): + await test_update_tuned_model_async(request_type=dict) + + +def test_update_tuned_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.UpdateTunedModelRequest() + + request.tuned_model.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + call.return_value = gag_tuned_model.TunedModel() + client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tuned_model.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_tuned_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.UpdateTunedModelRequest() + + request.tuned_model.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel() + ) + await client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tuned_model.name=name_value", + ) in kw["metadata"] + + +def test_update_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_tuned_model.TunedModel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tuned_model( + model_service.UpdateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_tuned_model.TunedModel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tuned_model( + model_service.UpdateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.DeleteTunedModelRequest, + dict, + ], +) +def test_delete_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.DeleteTunedModelRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tuned_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + client.delete_tuned_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.DeleteTunedModelRequest() + + +@pytest.mark.asyncio +async def test_delete_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.DeleteTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.DeleteTunedModelRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tuned_model_async_from_dict(): + await test_delete_tuned_model_async(request_type=dict) + + +def test_delete_tuned_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.DeleteTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + call.return_value = None + client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_tuned_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.DeleteTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tuned_model( + model_service.DeleteTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tuned_model( + model_service.DeleteTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + top_p=0.546, + top_k=541, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_model(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_rest_required_fields(request_type=model_service.GetModelRequest): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = model.Model() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_get_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = model.Model.to_json(model.Model()) + + request = model_service.GetModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model.Model() + + client.get_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_model_rest_bad_request( + transport: str = "rest", request_type=model_service.GetModelRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_model(request) + + +def test_get_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model.Model() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=models/*}" % client.transport._host, args[1] + ) + + +def test_get_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +def test_get_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_models(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_models_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_list_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.ListModelsRequest.pb( + model_service.ListModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
model_service.ListModelsResponse.to_json( + model_service.ListModelsResponse() + ) + + request = model_service.ListModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model_service.ListModelsResponse() + + client.list_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_models_rest_bad_request( + transport: str = "rest", request_type=model_service.ListModelsRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_models(request) + + +def test_list_models_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model_service.ListModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + page_size=951, + page_token="page_token_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/models" % client.transport._host, args[1] + ) + + +def test_list_models_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_rest_pager(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(model_service.ListModelsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + pages = list(client.list_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetTunedModelRequest, + dict, + ], +) +def test_get_tuned_model_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + base_model="base_model_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_tuned_model(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == tuned_model.TunedModel.State.CREATING + + +def test_get_tuned_model_rest_required_fields( + request_type=model_service.GetTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tuned_model.TunedModel() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_tuned_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_get_tuned_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.GetTunedModelRequest.pb( + model_service.GetTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = tuned_model.TunedModel.to_json( + tuned_model.TunedModel() + ) + + request = model_service.GetTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tuned_model.TunedModel() + + client.get_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_tuned_model_rest_bad_request( + transport: str = "rest", request_type=model_service.GetTunedModelRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_tuned_model(request) + + +def test_get_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = tuned_model.TunedModel() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=tunedModels/*}" % client.transport._host, args[1] + ) + + +def test_get_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tuned_model( + model_service.GetTunedModelRequest(), + name="name_value", + ) + + +def test_get_tuned_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListTunedModelsRequest, + dict, + ], +) +def test_list_tuned_models_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListTunedModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tuned_models(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunedModelsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tuned_models_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_tuned_models" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_list_tuned_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.ListTunedModelsRequest.pb( + model_service.ListTunedModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = model_service.ListTunedModelsResponse.to_json( + model_service.ListTunedModelsResponse() + ) + + request = model_service.ListTunedModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model_service.ListTunedModelsResponse() + + client.list_tuned_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_tuned_models_rest_bad_request( + transport: str = "rest", request_type=model_service.ListTunedModelsRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tuned_models(request) + + +def test_list_tuned_models_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model_service.ListTunedModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + page_size=951, + page_token="page_token_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListTunedModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tuned_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/tunedModels" % client.transport._host, args[1] + ) + + +def test_list_tuned_models_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tuned_models( + model_service.ListTunedModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_tuned_models_rest_pager(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + model_service.ListTunedModelsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_tuned_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuned_model.TunedModel) for i in results) + + pages = list(client.list_tuned_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.CreateTunedModelRequest, + dict, + ], +) +def test_create_tuned_model_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["tuned_model"] = { + "tuned_model_source": { + "tuned_model": "tuned_model_value", + "base_model": "base_model_value", + }, + "base_model": "base_model_value", + "name": 
"name_value", + "display_name": "display_name_value", + "description": "description_value", + "temperature": 0.1198, + "top_p": 0.546, + "top_k": 541, + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "tuning_task": { + "start_time": {}, + "complete_time": {}, + "snapshots": [ + {"step": 444, "epoch": 527, "mean_loss": 0.961, "compute_time": {}} + ], + "training_data": { + "examples": { + "examples": [ + {"text_input": "text_input_value", "output": "output_value"} + ] + } + }, + "hyperparameters": { + "epoch_count": 1175, + "batch_size": 1052, + "learning_rate": 0.1371, + }, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = model_service.CreateTunedModelRequest.meta.fields["tuned_model"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tuned_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tuned_model"][field])): + del request_init["tuned_model"][field][i][subfield] + else: + del 
request_init["tuned_model"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_tuned_model(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_tuned_model_rest_required_fields( + request_type=model_service.CreateTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tuned_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("tuned_model_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(("tunedModelId",)) & set(("tunedModel",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_create_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ModelServiceRestInterceptor, "post_create_tuned_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_create_tuned_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.CreateTunedModelRequest.pb( + model_service.CreateTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = model_service.CreateTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_tuned_model_rest_bad_request( + transport: str = "rest", request_type=model_service.CreateTunedModelRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_tuned_model(request) + + +def test_create_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/tunedModels" % client.transport._host, args[1] + ) + + +def test_create_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tuned_model( + model_service.CreateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + +def test_create_tuned_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.UpdateTunedModelRequest, + dict, + ], +) +def test_update_tuned_model_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"tuned_model": {"name": "tunedModels/sample1"}} + request_init["tuned_model"] = { + "tuned_model_source": { + "tuned_model": "tuned_model_value", + "base_model": "base_model_value", + }, + "base_model": "base_model_value", + "name": "tunedModels/sample1", + "display_name": "display_name_value", + "description": "description_value", + "temperature": 0.1198, + "top_p": 0.546, + "top_k": 541, + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "tuning_task": { + "start_time": {}, + "complete_time": {}, + "snapshots": [ + {"step": 444, "epoch": 527, "mean_loss": 0.961, "compute_time": {}} + ], + "training_data": { + "examples": { + "examples": [ + {"text_input": "text_input_value", "output": "output_value"} + ] + } + }, + 
"hyperparameters": { + "epoch_count": 1175, + "batch_size": 1052, + "learning_rate": 0.1371, + }, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = model_service.UpdateTunedModelRequest.meta.fields["tuned_model"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tuned_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + 
subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tuned_model"][field])): + del request_init["tuned_model"][field][i][subfield] + else: + del request_init["tuned_model"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + base_model="base_model_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_tuned_model(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == gag_tuned_model.TunedModel.State.CREATING + + +def test_update_tuned_model_rest_required_fields( + request_type=model_service.UpdateTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tuned_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_tuned_model.TunedModel() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "tunedModel", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as 
req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_update_tuned_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_update_tuned_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = model_service.UpdateTunedModelRequest.pb( + model_service.UpdateTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gag_tuned_model.TunedModel.to_json( + gag_tuned_model.TunedModel() + ) + + request = model_service.UpdateTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_tuned_model.TunedModel() + + client.update_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_tuned_model_rest_bad_request( + transport: str = "rest", request_type=model_service.UpdateTunedModelRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"tuned_model": {"name": "tunedModels/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_tuned_model(request) + + +def test_update_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_tuned_model.TunedModel() + + # get arguments that satisfy an http rule for this method + sample_request = {"tuned_model": {"name": "tunedModels/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{tuned_model.name=tunedModels/*}" % client.transport._host, + args[1], + ) + + +def test_update_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tuned_model( + model_service.UpdateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_tuned_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.DeleteTunedModelRequest, + dict, + ], +) +def test_delete_tuned_model_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_tuned_model(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_tuned_model_rest_required_fields( + request_type=model_service.DeleteTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_delete_tuned_model" + ) as pre: + pre.assert_not_called() + pb_message = model_service.DeleteTunedModelRequest.pb( + model_service.DeleteTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = model_service.DeleteTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_tuned_model_rest_bad_request( + transport: str = "rest", request_type=model_service.DeleteTunedModelRequest +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_tuned_model(request) + + +def test_delete_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=tunedModels/*}" % client.transport._host, args[1] + ) + + +def test_delete_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tuned_model( + model_service.DeleteTunedModelRequest(), + name="name_value", + ) + + +def test_delete_tuned_model_rest_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ModelServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ModelServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ModelServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ModelServiceGrpcTransport, + ) + + +def test_model_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_model_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.model_service.transports.ModelServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_model", + "list_models", + "get_tuned_model", + "list_tuned_models", + "create_tuned_model", + "update_tuned_model", + "delete_tuned_model", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_model_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_model_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport() + adc.assert_called_once() + + +def test_model_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ModelServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + ], +) +def test_model_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_model_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ModelServiceGrpcTransport, grpc_helpers), + (transports.ModelServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_model_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_model_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ModelServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_model_service_rest_lro_client(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_model_service_host_no_port(transport_name): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_model_service_host_with_port(transport_name): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_model_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ModelServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ModelServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_model._session + session2 = client2.transport.get_model._session + assert session1 != session2 + session1 = client1.transport.list_models._session + session2 = client2.transport.list_models._session + assert session1 != session2 + session1 = client1.transport.get_tuned_model._session + session2 = client2.transport.get_tuned_model._session + assert 
session1 != session2 + session1 = client1.transport.list_tuned_models._session + session2 = client2.transport.list_tuned_models._session + assert session1 != session2 + session1 = client1.transport.create_tuned_model._session + session2 = client2.transport.create_tuned_model._session + assert session1 != session2 + session1 = client1.transport.update_tuned_model._session + session2 = client2.transport.update_tuned_model._session + assert session1 != session2 + session1 = client1.transport.delete_tuned_model._session + session2 = client2.transport.delete_tuned_model._session + assert session1 != session2 + + +def test_model_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ModelServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_model_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ModelServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_service_grpc_lro_client(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_model_service_grpc_lro_async_client(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = ModelServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = ModelServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_model_path(path) + assert expected == actual + + +def test_tuned_model_path(): + tuned_model = "whelk" + expected = "tunedModels/{tuned_model}".format( + tuned_model=tuned_model, + ) + actual = ModelServiceClient.tuned_model_path(tuned_model) + assert expected == actual + + +def test_parse_tuned_model_path(): + expected = { + "tuned_model": "octopus", + } + path = ModelServiceClient.tuned_model_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_tuned_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ModelServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ModelServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ModelServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ModelServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ModelServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ModelServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ModelServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ModelServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ModelServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ModelServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ModelServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) 
+ with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport), + (ModelServiceAsyncClient, transports.ModelServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py new file mode 100644 index 000000000000..e48c91ae23e1 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py @@ -0,0 +1,4929 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.permission_service import ( + PermissionServiceAsyncClient, + PermissionServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1beta.types import permission as gag_permission +from google.ai.generativelanguage_v1beta.types import permission +from 
google.ai.generativelanguage_v1beta.types import permission_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PermissionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PermissionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PermissionServiceClient, "grpc"), + (PermissionServiceAsyncClient, "grpc_asyncio"), + (PermissionServiceClient, "rest"), + ], +) +def test_permission_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, 
transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PermissionServiceGrpcTransport, "grpc"), + (transports.PermissionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PermissionServiceRestTransport, "rest"), + ], +) +def test_permission_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PermissionServiceClient, "grpc"), + (PermissionServiceAsyncClient, "grpc_asyncio"), + (PermissionServiceClient, "rest"), + ], +) +def test_permission_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", 
transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_permission_service_client_get_transport_class(): + transport = PermissionServiceClient.get_transport_class() + available_transports = [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceRestTransport, + ] + assert transport in available_transports + + transport = PermissionServiceClient.get_transport_class("grpc") + assert transport == transports.PermissionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PermissionServiceClient, transports.PermissionServiceGrpcTransport, "grpc"), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PermissionServiceClient, transports.PermissionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PermissionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceAsyncClient), +) +def test_permission_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PermissionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(PermissionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + 
patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PermissionServiceClient, + transports.PermissionServiceRestTransport, + "rest", + "true", + ), + ( + PermissionServiceClient, + transports.PermissionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PermissionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_permission_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PermissionServiceClient, PermissionServiceAsyncClient] +) +@mock.patch.object( + PermissionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceAsyncClient), +) +def test_permission_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PermissionServiceClient, transports.PermissionServiceGrpcTransport, "grpc"), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PermissionServiceClient, transports.PermissionServiceRestTransport, "rest"), + ], +) +def test_permission_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PermissionServiceClient, + transports.PermissionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_permission_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_permission_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.permission_service.transports.PermissionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PermissionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_permission_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.CreatePermissionRequest, + dict, + ], +) +def test_create_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + response = client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.CreatePermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +def test_create_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + client.create_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.CreatePermissionRequest() + + +@pytest.mark.asyncio +async def test_create_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.CreatePermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + ) + response = await client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.CreatePermissionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +@pytest.mark.asyncio +async def test_create_permission_async_from_dict(): + await test_create_permission_async(request_type=dict) + + +def test_create_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.CreatePermissionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + call.return_value = gag_permission.Permission() + client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.CreatePermissionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + await client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_permission( + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + + +def test_create_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_permission( + permission_service.CreatePermissionRequest(), + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_permission( + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_permission( + permission_service.CreatePermissionRequest(), + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.GetPermissionRequest, + dict, + ], +) +def test_get_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + response = client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.GetPermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == permission.Permission.Role.OWNER + + +def test_get_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + client.get_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.GetPermissionRequest() + + +@pytest.mark.asyncio +async def test_get_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.GetPermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + ) + response = await client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.GetPermissionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == permission.Permission.Role.OWNER + + +@pytest.mark.asyncio +async def test_get_permission_async_from_dict(): + await test_get_permission_async(request_type=dict) + + +def test_get_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.GetPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + call.return_value = permission.Permission() + client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.GetPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission() + ) + await client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission.Permission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_permission( + permission_service.GetPermissionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission.Permission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_permission( + permission_service.GetPermissionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.ListPermissionsRequest, + dict, + ], +) +def test_list_permissions(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.ListPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPermissionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + client.list_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.ListPermissionsRequest() + + +@pytest.mark.asyncio +async def test_list_permissions_async( + transport: str = "grpc_asyncio", + request_type=permission_service.ListPermissionsRequest, +): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.ListPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPermissionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_permissions_async_from_dict(): + await test_list_permissions_async(request_type=dict) + + +def test_list_permissions_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.ListPermissionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + call.return_value = permission_service.ListPermissionsResponse() + client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_permissions_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = permission_service.ListPermissionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse() + ) + await client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_permissions_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.ListPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_permissions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_permissions_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_permissions( + permission_service.ListPermissionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_permissions_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.ListPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_permissions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_permissions_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_permissions( + permission_service.ListPermissionsRequest(), + parent="parent_value", + ) + + +def test_list_permissions_pager(transport_name: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_permissions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, permission.Permission) for i in results) + + +def test_list_permissions_pages(transport_name: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + pages = list(client.list_permissions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_permissions_async_pager(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_permissions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_permissions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, permission.Permission) for i in responses) + + +@pytest.mark.asyncio +async def test_list_permissions_async_pages(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_permissions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_permissions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.UpdatePermissionRequest, + dict, + ], +) +def test_update_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + response = client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.UpdatePermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +def test_update_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + client.update_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.UpdatePermissionRequest() + + +@pytest.mark.asyncio +async def test_update_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.UpdatePermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + ) + response = await client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.UpdatePermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +@pytest.mark.asyncio +async def test_update_permission_async_from_dict(): + await test_update_permission_async(request_type=dict) + + +def test_update_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.UpdatePermissionRequest() + + request.permission.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + call.return_value = gag_permission.Permission() + client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "permission.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.UpdatePermissionRequest() + + request.permission.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + await client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "permission.name=name_value", + ) in kw["metadata"] + + +def test_update_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_permission( + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_permission( + permission_service.UpdatePermissionRequest(), + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_permission( + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_permission( + permission_service.UpdatePermissionRequest(), + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.DeletePermissionRequest, + dict, + ], +) +def test_delete_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.DeletePermissionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + client.delete_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.DeletePermissionRequest() + + +@pytest.mark.asyncio +async def test_delete_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.DeletePermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.DeletePermissionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_permission_async_from_dict(): + await test_delete_permission_async(request_type=dict) + + +def test_delete_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = permission_service.DeletePermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + call.return_value = None + client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.DeletePermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_permission( + permission_service.DeletePermissionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_permission( + permission_service.DeletePermissionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.TransferOwnershipRequest, + dict, + ], +) +def test_transfer_ownership(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.TransferOwnershipResponse() + response = client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.TransferOwnershipRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, permission_service.TransferOwnershipResponse) + + +def test_transfer_ownership_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + client.transfer_ownership() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.TransferOwnershipRequest() + + +@pytest.mark.asyncio +async def test_transfer_ownership_async( + transport: str = "grpc_asyncio", + request_type=permission_service.TransferOwnershipRequest, +): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.TransferOwnershipResponse() + ) + response = await client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.TransferOwnershipRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, permission_service.TransferOwnershipResponse) + + +@pytest.mark.asyncio +async def test_transfer_ownership_async_from_dict(): + await test_transfer_ownership_async(request_type=dict) + + +def test_transfer_ownership_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.TransferOwnershipRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + call.return_value = permission_service.TransferOwnershipResponse() + client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_transfer_ownership_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.TransferOwnershipRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.TransferOwnershipResponse() + ) + await client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.CreatePermissionRequest, + dict, + ], +) +def test_create_permission_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request_init["permission"] = { + "name": "name_value", + "grantee_type": 1, + "email_address": "email_address_value", + "role": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = permission_service.CreatePermissionRequest.meta.fields["permission"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["permission"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["permission"][field])): + del request_init["permission"][field][i][subfield] + else: + del 
request_init["permission"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_permission(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +def test_create_permission_rest_required_fields( + request_type=permission_service.CreatePermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_permission._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_permission._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "permission", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_create_permission" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_create_permission" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = permission_service.CreatePermissionRequest.pb( + 
permission_service.CreatePermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gag_permission.Permission.to_json( + gag_permission.Permission() + ) + + request = permission_service.CreatePermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_permission.Permission() + + client.create_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_permission_rest_bad_request( + transport: str = "rest", request_type=permission_service.CreatePermissionRequest +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_permission(request) + + +def test_create_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gag_permission.Permission() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=tunedModels/*}/permissions" % client.transport._host, + args[1], + ) + + +def test_create_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_permission( + permission_service.CreatePermissionRequest(), + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + +def test_create_permission_rest_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.GetPermissionRequest, + dict, + ], +) +def test_get_permission_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_permission(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == permission.Permission.Role.OWNER + + +def test_get_permission_rest_required_fields( + request_type=permission_service.GetPermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = permission.Permission() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_permission._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.PermissionServiceRestInterceptor, "post_get_permission" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_get_permission" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = permission_service.GetPermissionRequest.pb( + permission_service.GetPermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = permission.Permission.to_json( + permission.Permission() + ) + + request = permission_service.GetPermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = permission.Permission() + + client.get_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_permission_rest_bad_request( + transport: str = "rest", request_type=permission_service.GetPermissionRequest +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_permission(request) + + +def test_get_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission.Permission() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1/permissions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=tunedModels/*/permissions/*}" % client.transport._host, + args[1], + ) + + +def test_get_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_permission( + permission_service.GetPermissionRequest(), + name="name_value", + ) + + +def test_get_permission_rest_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.ListPermissionsRequest, + dict, + ], +) +def test_list_permissions_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission_service.ListPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPermissionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_permissions_rest_required_fields( + request_type=permission_service.ListPermissionsRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_permissions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = permission_service.ListPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission_service.ListPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_permissions_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_permissions_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( 
+ path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_list_permissions" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_list_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = permission_service.ListPermissionsRequest.pb( + permission_service.ListPermissionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = permission_service.ListPermissionsResponse.to_json( + permission_service.ListPermissionsResponse() + ) + + request = permission_service.ListPermissionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = permission_service.ListPermissionsResponse() + + client.list_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_permissions_rest_bad_request( + transport: str = "rest", request_type=permission_service.ListPermissionsRequest +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_permissions(request) + + +def test_list_permissions_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission_service.ListPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission_service.ListPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=tunedModels/*}/permissions" % client.transport._host, + args[1], + ) + + +def test_list_permissions_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_permissions( + permission_service.ListPermissionsRequest(), + parent="parent_value", + ) + + +def test_list_permissions_rest_pager(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + permission_service.ListPermissionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "tunedModels/sample1"} + + pager = client.list_permissions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, permission.Permission) for i in results) + + pages = list(client.list_permissions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.UpdatePermissionRequest, + dict, + ], +) +def test_update_permission_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"permission": {"name": "tunedModels/sample1/permissions/sample2"}} + request_init["permission"] = { + "name": 
"tunedModels/sample1/permissions/sample2", + "grantee_type": 1, + "email_address": "email_address_value", + "role": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = permission_service.UpdatePermissionRequest.meta.fields["permission"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["permission"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in 
runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["permission"][field])): + del request_init["permission"][field][i][subfield] + else: + del request_init["permission"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_permission(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +def test_update_permission_rest_required_fields( + request_type=permission_service.UpdatePermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_permission._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_permission._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "permission", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_update_permission" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_update_permission" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = permission_service.UpdatePermissionRequest.pb( + permission_service.UpdatePermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gag_permission.Permission.to_json( + gag_permission.Permission() + ) + + request = permission_service.UpdatePermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_permission.Permission() + + client.update_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_permission_rest_bad_request( + transport: str = "rest", request_type=permission_service.UpdatePermissionRequest +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"permission": {"name": "tunedModels/sample1/permissions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_permission(request) + + +def test_update_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + + # get arguments that satisfy an http rule for this method + sample_request = { + "permission": {"name": "tunedModels/sample1/permissions/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{permission.name=tunedModels/*/permissions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_permission( + permission_service.UpdatePermissionRequest(), + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_permission_rest_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.DeletePermissionRequest, + dict, + ], +) +def test_delete_permission_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_permission(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_permission_rest_required_fields( + request_type=permission_service.DeletePermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_permission._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_delete_permission" + ) as pre: + pre.assert_not_called() + pb_message = permission_service.DeletePermissionRequest.pb( + permission_service.DeletePermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = permission_service.DeletePermissionRequest() + metadata = [ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_permission_rest_bad_request( + transport: str = "rest", request_type=permission_service.DeletePermissionRequest +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_permission(request) + + +def test_delete_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1/permissions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=tunedModels/*/permissions/*}" % client.transport._host, + args[1], + ) + + +def test_delete_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_permission( + permission_service.DeletePermissionRequest(), + name="name_value", + ) + + +def test_delete_permission_rest_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.TransferOwnershipRequest, + dict, + ], +) +def test_transfer_ownership_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission_service.TransferOwnershipResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission_service.TransferOwnershipResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.transfer_ownership(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, permission_service.TransferOwnershipResponse) + + +def test_transfer_ownership_rest_required_fields( + request_type=permission_service.TransferOwnershipRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["email_address"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_ownership._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["emailAddress"] = "email_address_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_ownership._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in 
jsonified_request + assert jsonified_request["name"] == "name_value" + assert "emailAddress" in jsonified_request + assert jsonified_request["emailAddress"] == "email_address_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = permission_service.TransferOwnershipResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission_service.TransferOwnershipResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.transfer_ownership(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_transfer_ownership_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.transfer_ownership._get_unset_required_fields({}) + assert 
set(unset_fields) == ( + set(()) + & set( + ( + "name", + "emailAddress", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_transfer_ownership_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_transfer_ownership" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_transfer_ownership" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = permission_service.TransferOwnershipRequest.pb( + permission_service.TransferOwnershipRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + permission_service.TransferOwnershipResponse.to_json( + permission_service.TransferOwnershipResponse() + ) + ) + + request = permission_service.TransferOwnershipRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = permission_service.TransferOwnershipResponse() + + client.transfer_ownership( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_transfer_ownership_rest_bad_request( + transport: str = "rest", request_type=permission_service.TransferOwnershipRequest +): + client = PermissionServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.transfer_ownership(request) + + +def test_transfer_ownership_rest_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PermissionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PermissionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + transports.PermissionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PermissionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PermissionServiceGrpcTransport, + ) + + +def test_permission_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PermissionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_permission_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.permission_service.transports.PermissionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PermissionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_permission", + "get_permission", + "list_permissions", + "update_permission", + "delete_permission", + "transfer_ownership", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_permission_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.permission_service.transports.PermissionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PermissionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_permission_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.permission_service.transports.PermissionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PermissionServiceTransport() + adc.assert_called_once() + + +def test_permission_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PermissionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + transports.PermissionServiceRestTransport, + ], +) +def test_permission_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PermissionServiceGrpcTransport, grpc_helpers), + (transports.PermissionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def 
test_permission_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_permission_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PermissionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_permission_service_host_no_port(transport_name): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_permission_service_host_with_port(transport_name): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_permission_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PermissionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PermissionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_permission._session + session2 = client2.transport.create_permission._session + assert session1 != session2 + session1 = client1.transport.get_permission._session + session2 = client2.transport.get_permission._session + assert session1 != session2 + session1 = client1.transport.list_permissions._session + session2 = client2.transport.list_permissions._session + assert session1 != session2 + session1 = client1.transport.update_permission._session + session2 = client2.transport.update_permission._session + assert session1 != session2 + session1 = client1.transport.delete_permission._session + session2 = client2.transport.delete_permission._session + assert session1 != session2 + session1 = client1.transport.transfer_ownership._session + session2 = client2.transport.transfer_ownership._session + assert session1 != session2 + + +def test_permission_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PermissionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_permission_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PermissionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_permission_path(): + tuned_model = "squid" + permission = "clam" + expected = "tunedModels/{tuned_model}/permissions/{permission}".format( + tuned_model=tuned_model, + permission=permission, + ) + actual 
= PermissionServiceClient.permission_path(tuned_model, permission) + assert expected == actual + + +def test_parse_permission_path(): + expected = { + "tuned_model": "whelk", + "permission": "octopus", + } + path = PermissionServiceClient.permission_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_permission_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PermissionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = PermissionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PermissionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = PermissionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PermissionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PermissionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = PermissionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = PermissionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = PermissionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PermissionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = PermissionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PermissionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PermissionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PermissionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PermissionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PermissionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PermissionServiceClient, transports.PermissionServiceGrpcTransport), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py new file mode 100644 index 000000000000..aa722566362a --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py @@ -0,0 +1,11851 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.retriever_service import ( + RetrieverServiceAsyncClient, + RetrieverServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1beta.types import retriever, retriever_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RetrieverServiceClient._get_default_mtls_endpoint(None) is None + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RetrieverServiceClient, "grpc"), + (RetrieverServiceAsyncClient, "grpc_asyncio"), + (RetrieverServiceClient, "rest"), + ], +) +def test_retriever_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + 
else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RetrieverServiceGrpcTransport, "grpc"), + (transports.RetrieverServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RetrieverServiceRestTransport, "rest"), + ], +) +def test_retriever_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RetrieverServiceClient, "grpc"), + (RetrieverServiceAsyncClient, "grpc_asyncio"), + (RetrieverServiceClient, "rest"), + ], +) +def test_retriever_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com" + ) + + +def test_retriever_service_client_get_transport_class(): + transport = RetrieverServiceClient.get_transport_class() + available_transports = [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceRestTransport, + ] + assert transport in available_transports + + transport = RetrieverServiceClient.get_transport_class("grpc") + assert transport == transports.RetrieverServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport, "grpc"), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RetrieverServiceClient, transports.RetrieverServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + RetrieverServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceAsyncClient), +) +def test_retriever_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RetrieverServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RetrieverServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + "true", + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + "false", + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceRestTransport, + "rest", + "true", + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + RetrieverServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_retriever_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [RetrieverServiceClient, RetrieverServiceAsyncClient] +) +@mock.patch.object( + RetrieverServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceAsyncClient), +) +def test_retriever_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport, "grpc"), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RetrieverServiceClient, transports.RetrieverServiceRestTransport, "rest"), + ], +) +def test_retriever_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceRestTransport, + "rest", + None, + ), + ], +) +def test_retriever_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_retriever_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.retriever_service.transports.RetrieverServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = RetrieverServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_retriever_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateCorpusRequest, + dict, + ], +) +def test_create_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + response = client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateCorpusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_corpus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + client.create_corpus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateCorpusRequest() + + +@pytest.mark.asyncio +async def test_create_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.CreateCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateCorpusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_corpus_async_from_dict(): + await test_create_corpus_async(request_type=dict) + + +def test_create_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_corpus( + corpus=retriever.Corpus(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + + +def test_create_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_corpus( + retriever_service.CreateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_corpus( + corpus=retriever.Corpus(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_corpus( + retriever_service.CreateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetCorpusRequest, + dict, + ], +) +def test_get_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + response = client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetCorpusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_corpus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + client.get_corpus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetCorpusRequest() + + +@pytest.mark.asyncio +async def test_get_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.GetCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetCorpusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_corpus_async_from_dict(): + await test_get_corpus_async(request_type=dict) + + +def test_get_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = retriever_service.GetCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + await client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_corpus( + retriever_service.GetCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_corpus( + retriever_service.GetCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateCorpusRequest, + dict, + ], +) +def test_update_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + response = client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateCorpusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_corpus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + client.update_corpus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateCorpusRequest() + + +@pytest.mark.asyncio +async def test_update_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.UpdateCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateCorpusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_update_corpus_async_from_dict(): + await test_update_corpus_async(request_type=dict) + + +def test_update_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateCorpusRequest() + + request.corpus.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "corpus.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.UpdateCorpusRequest() + + request.corpus.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + await client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "corpus.name=name_value", + ) in kw["metadata"] + + +def test_update_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_corpus( + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_corpus( + retriever_service.UpdateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_corpus( + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_corpus( + retriever_service.UpdateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteCorpusRequest, + dict, + ], +) +def test_delete_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteCorpusRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_corpus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + client.delete_corpus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteCorpusRequest() + + +@pytest.mark.asyncio +async def test_delete_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.DeleteCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteCorpusRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_corpus_async_from_dict(): + await test_delete_corpus_async(request_type=dict) + + +def test_delete_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + call.return_value = None + client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_corpus( + retriever_service.DeleteCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_corpus( + retriever_service.DeleteCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListCorporaRequest, + dict, + ], +) +def test_list_corpora(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + response = client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListCorporaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCorporaPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_corpora_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + client.list_corpora() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListCorporaRequest() + + +@pytest.mark.asyncio +async def test_list_corpora_async( + transport: str = "grpc_asyncio", request_type=retriever_service.ListCorporaRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListCorporaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCorporaAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_corpora_async_from_dict(): + await test_list_corpora_async(request_type=dict) + + +def test_list_corpora_pager(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_corpora(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Corpus) for i in results) + + +def test_list_corpora_pages(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + pages = list(client.list_corpora(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_corpora_async_pager(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_corpora), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_corpora( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, retriever.Corpus) for i in responses) + + +@pytest.mark.asyncio +async def test_list_corpora_async_pages(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_corpora), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_corpora(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryCorpusRequest, + dict, + ], +) +def test_query_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.QueryCorpusResponse() + response = client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryCorpusRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.QueryCorpusResponse) + + +def test_query_corpus_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + client.query_corpus() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryCorpusRequest() + + +@pytest.mark.asyncio +async def test_query_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.QueryCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryCorpusResponse() + ) + response = await client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryCorpusRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.QueryCorpusResponse) + + +@pytest.mark.asyncio +async def test_query_corpus_async_from_dict(): + await test_query_corpus_async(request_type=dict) + + +def test_query_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + call.return_value = retriever_service.QueryCorpusResponse() + client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryCorpusResponse() + ) + await client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + response = client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_document), "__call__") as call: + client.create_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateDocumentRequest() + + +@pytest.mark.asyncio +async def test_create_document_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.CreateDocumentRequest, +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_document_async_from_dict(): + await test_create_document_async(request_type=dict) + + +def test_create_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.CreateDocumentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = retriever.Document() + client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.CreateDocumentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = retriever.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_document( + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + + +def test_create_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + retriever_service.CreateDocumentRequest(), + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_document( + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_document( + retriever_service.CreateDocumentRequest(), + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetDocumentRequest, + dict, + ], +) +def test_get_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + response = client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetDocumentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + client.get_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetDocumentRequest() + + +@pytest.mark.asyncio +async def test_get_document_async( + transport: str = "grpc_asyncio", request_type=retriever_service.GetDocumentRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetDocumentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_document_async_from_dict(): + await test_get_document_async(request_type=dict) + + +def test_get_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = retriever.Document() + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + retriever_service.GetDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = retriever.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_document( + retriever_service.GetDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateDocumentRequest, + dict, + ], +) +def test_update_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + response = client.update_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + client.update_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateDocumentRequest() + + +@pytest.mark.asyncio +async def test_update_document_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.UpdateDocumentRequest, +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateDocumentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_update_document_async_from_dict(): + await test_update_document_async(request_type=dict) + + +def test_update_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateDocumentRequest() + + request.document.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = retriever.Document() + client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "document.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateDocumentRequest() + + request.document.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "document.name=name_value", + ) in kw["metadata"] + + +def test_update_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_document( + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_document( + retriever_service.UpdateDocumentRequest(), + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_document( + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_document( + retriever_service.UpdateDocumentRequest(), + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteDocumentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + client.delete_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteDocumentRequest() + + +@pytest.mark.asyncio +async def test_delete_document_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.DeleteDocumentRequest, +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteDocumentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_document_async_from_dict(): + await test_delete_document_async(request_type=dict) + + +def test_delete_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = None + client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + retriever_service.DeleteDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_document( + retriever_service.DeleteDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + client.list_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListDocumentsRequest() + + +@pytest.mark.asyncio +async def test_list_documents_async( + transport: str = "grpc_asyncio", request_type=retriever_service.ListDocumentsRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_documents_async_from_dict(): + await test_list_documents_async(request_type=dict) + + +def test_list_documents_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.ListDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = retriever_service.ListDocumentsResponse() + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_documents_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.ListDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse() + ) + await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_documents_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListDocumentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_documents( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_documents_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + retriever_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_documents_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListDocumentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_documents( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_documents_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_documents( + retriever_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_pager(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_documents(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Document) for i in results) + + +def test_list_documents_pages(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call 
within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + pages = list(client.list_documents(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_documents_async_pager(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_documents( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, retriever.Document) for i in responses) + + +@pytest.mark.asyncio +async def test_list_documents_async_pages(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_documents(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryDocumentRequest, + dict, + ], +) +def test_query_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.QueryDocumentResponse() + response = client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryDocumentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.QueryDocumentResponse) + + +def test_query_document_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + client.query_document() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryDocumentRequest() + + +@pytest.mark.asyncio +async def test_query_document_async( + transport: str = "grpc_asyncio", request_type=retriever_service.QueryDocumentRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryDocumentResponse() + ) + response = await client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryDocumentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.QueryDocumentResponse) + + +@pytest.mark.asyncio +async def test_query_document_async_from_dict(): + await test_query_document_async(request_type=dict) + + +def test_query_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + call.return_value = retriever_service.QueryDocumentResponse() + client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryDocumentResponse() + ) + await client.query_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateChunkRequest, + dict, + ], +) +def test_create_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + response = client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateChunkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_create_chunk_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + client.create_chunk() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateChunkRequest() + + +@pytest.mark.asyncio +async def test_create_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.CreateChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + response = await client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateChunkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.asyncio +async def test_create_chunk_async_from_dict(): + await test_create_chunk_async(request_type=dict) + + +def test_create_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.CreateChunkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.CreateChunkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + await client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = retriever.Chunk() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_chunk( + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + + +def test_create_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_chunk( + retriever_service.CreateChunkRequest(), + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_chunk( + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_chunk( + retriever_service.CreateChunkRequest(), + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchCreateChunksRequest, + dict, + ], +) +def test_batch_create_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.BatchCreateChunksResponse() + response = client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchCreateChunksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchCreateChunksResponse) + + +def test_batch_create_chunks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + client.batch_create_chunks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchCreateChunksRequest() + + +@pytest.mark.asyncio +async def test_batch_create_chunks_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.BatchCreateChunksRequest, +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchCreateChunksResponse() + ) + response = await client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchCreateChunksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.BatchCreateChunksResponse) + + +@pytest.mark.asyncio +async def test_batch_create_chunks_async_from_dict(): + await test_batch_create_chunks_async(request_type=dict) + + +def test_batch_create_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchCreateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + call.return_value = retriever_service.BatchCreateChunksResponse() + client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchCreateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchCreateChunksResponse() + ) + await client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetChunkRequest, + dict, + ], +) +def test_get_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + response = client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetChunkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_get_chunk_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + client.get_chunk() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetChunkRequest() + + +@pytest.mark.asyncio +async def test_get_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.GetChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + response = await client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetChunkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.asyncio +async def test_get_chunk_async_from_dict(): + await test_get_chunk_async(request_type=dict) + + +def test_get_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.GetChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + await client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = retriever.Chunk() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_chunk( + retriever_service.GetChunkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_chunk( + retriever_service.GetChunkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateChunkRequest, + dict, + ], +) +def test_update_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + response = client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateChunkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_update_chunk_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + client.update_chunk() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateChunkRequest() + + +@pytest.mark.asyncio +async def test_update_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.UpdateChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + response = await client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateChunkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.asyncio +async def test_update_chunk_async_from_dict(): + await test_update_chunk_async(request_type=dict) + + +def test_update_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.UpdateChunkRequest() + + request.chunk.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "chunk.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateChunkRequest() + + request.chunk.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + await client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "chunk.name=name_value", + ) in kw["metadata"] + + +def test_update_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = retriever.Chunk() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_chunk( + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_chunk( + retriever_service.UpdateChunkRequest(), + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_chunk( + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_chunk( + retriever_service.UpdateChunkRequest(), + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchUpdateChunksRequest, + dict, + ], +) +def test_batch_update_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.BatchUpdateChunksResponse() + response = client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchUpdateChunksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.BatchUpdateChunksResponse) + + +def test_batch_update_chunks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + client.batch_update_chunks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchUpdateChunksRequest() + + +@pytest.mark.asyncio +async def test_batch_update_chunks_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.BatchUpdateChunksRequest, +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchUpdateChunksResponse() + ) + response = await client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchUpdateChunksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.BatchUpdateChunksResponse) + + +@pytest.mark.asyncio +async def test_batch_update_chunks_async_from_dict(): + await test_batch_update_chunks_async(request_type=dict) + + +def test_batch_update_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchUpdateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + call.return_value = retriever_service.BatchUpdateChunksResponse() + client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_update_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchUpdateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchUpdateChunksResponse() + ) + await client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteChunkRequest, + dict, + ], +) +def test_delete_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteChunkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_chunk_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + client.delete_chunk() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteChunkRequest() + + +@pytest.mark.asyncio +async def test_delete_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.DeleteChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteChunkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_chunk_async_from_dict(): + await test_delete_chunk_async(request_type=dict) + + +def test_delete_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + call.return_value = None + client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_chunk( + retriever_service.DeleteChunkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_chunk( + retriever_service.DeleteChunkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchDeleteChunksRequest, + dict, + ], +) +def test_batch_delete_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchDeleteChunksRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_batch_delete_chunks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + client.batch_delete_chunks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchDeleteChunksRequest() + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.BatchDeleteChunksRequest, +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchDeleteChunksRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_async_from_dict(): + await test_batch_delete_chunks_async(request_type=dict) + + +def test_batch_delete_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchDeleteChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + call.return_value = None + client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchDeleteChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListChunksRequest, + dict, + ], +) +def test_list_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListChunksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChunksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_chunks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + client.list_chunks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListChunksRequest() + + +@pytest.mark.asyncio +async def test_list_chunks_async( + transport: str = "grpc_asyncio", request_type=retriever_service.ListChunksRequest +): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListChunksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChunksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_chunks_async_from_dict(): + await test_list_chunks_async(request_type=dict) + + +def test_list_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.ListChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + call.return_value = retriever_service.ListChunksResponse() + client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = retriever_service.ListChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse() + ) + await client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_chunks_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListChunksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_chunks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_chunks_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_chunks( + retriever_service.ListChunksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_chunks_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListChunksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_chunks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_chunks_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_chunks( + retriever_service.ListChunksRequest(), + parent="parent_value", + ) + + +def test_list_chunks_pager(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_chunks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Chunk) for i in results) + + +def test_list_chunks_pages(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + pages = list(client.list_chunks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_chunks_async_pager(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_chunks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_chunks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, retriever.Chunk) for i in responses) + + +@pytest.mark.asyncio +async def test_list_chunks_async_pages(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_chunks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_chunks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateCorpusRequest, + dict, + ], +) +def test_create_corpus_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["corpus"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.CreateCorpusRequest.meta.fields["corpus"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["corpus"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["corpus"][field])): + del 
request_init["corpus"][field][i][subfield] + else: + del request_init["corpus"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_corpus(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_corpus_rest_required_fields( + request_type=retriever_service.CreateCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_corpus._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("corpus",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_corpus_rest_interceptors(null_interceptor): + transport = 
transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_create_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.CreateCorpusRequest.pb( + retriever_service.CreateCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Corpus.to_json(retriever.Corpus()) + + request = retriever_service.CreateCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Corpus() + + client.create_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_corpus_rest_bad_request( + transport: str = "rest", request_type=retriever_service.CreateCorpusRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_corpus(request) + + +def test_create_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + corpus=retriever.Corpus(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/corpora" % client.transport._host, args[1] + ) + + +def test_create_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_corpus( + retriever_service.CreateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + ) + + +def test_create_corpus_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetCorpusRequest, + dict, + ], +) +def test_get_corpus_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_corpus(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_corpus_rest_required_fields( + request_type=retriever_service.GetCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_corpus._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_get_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
retriever_service.GetCorpusRequest.pb( + retriever_service.GetCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Corpus.to_json(retriever.Corpus()) + + request = retriever_service.GetCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Corpus() + + client.get_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_corpus_rest_bad_request( + transport: str = "rest", request_type=retriever_service.GetCorpusRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_corpus(request) + + +def test_get_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever.Corpus() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=corpora/*}" % client.transport._host, args[1] + ) + + +def test_get_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_corpus( + retriever_service.GetCorpusRequest(), + name="name_value", + ) + + +def test_get_corpus_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateCorpusRequest, + dict, + ], +) +def test_update_corpus_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"corpus": {"name": "corpora/sample1"}} + request_init["corpus"] = { + "name": "corpora/sample1", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.UpdateCorpusRequest.meta.fields["corpus"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["corpus"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["corpus"][field])): + del request_init["corpus"][field][i][subfield] + else: + del 
request_init["corpus"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_corpus(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_corpus_rest_required_fields( + request_type=retriever_service.UpdateCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_corpus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_corpus._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "corpus", + "updateMask", + ) + ) + 
) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_update_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.UpdateCorpusRequest.pb( + retriever_service.UpdateCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Corpus.to_json(retriever.Corpus()) + + request = retriever_service.UpdateCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Corpus() + + client.update_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_corpus_rest_bad_request( + transport: str = "rest", request_type=retriever_service.UpdateCorpusRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"corpus": {"name": "corpora/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_corpus(request) + + +def test_update_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + + # get arguments that satisfy an http rule for this method + sample_request = {"corpus": {"name": "corpora/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{corpus.name=corpora/*}" % client.transport._host, args[1] + ) + + +def test_update_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_corpus( + retriever_service.UpdateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_corpus_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteCorpusRequest, + dict, + ], +) +def test_delete_corpus_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_corpus(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_corpus_rest_required_fields( + request_type=retriever_service.DeleteCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_corpus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_corpus._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_delete_corpus" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.DeleteCorpusRequest.pb( + retriever_service.DeleteCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = retriever_service.DeleteCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_corpus_rest_bad_request( + transport: str = "rest", request_type=retriever_service.DeleteCorpusRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_corpus(request) + + +def test_delete_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=corpora/*}" % client.transport._host, args[1] + ) + + +def test_delete_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_corpus( + retriever_service.DeleteCorpusRequest(), + name="name_value", + ) + + +def test_delete_corpus_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListCorporaRequest, + dict, + ], +) +def test_list_corpora_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListCorporaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_corpora(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCorporaPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_corpora_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_corpora" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_list_corpora" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.ListCorporaRequest.pb( + retriever_service.ListCorporaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.ListCorporaResponse.to_json( + retriever_service.ListCorporaResponse() + ) + + request = 
retriever_service.ListCorporaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.ListCorporaResponse() + + client.list_corpora( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_corpora_rest_bad_request( + transport: str = "rest", request_type=retriever_service.ListCorporaRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_corpora(request) + + +def test_list_corpora_rest_pager(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + retriever_service.ListCorporaResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_corpora(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Corpus) for i in results) + + pages = list(client.list_corpora(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryCorpusRequest, + dict, + ], +) +def test_query_corpus_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryCorpusResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.QueryCorpusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.query_corpus(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryCorpusResponse) + + +def test_query_corpus_rest_required_fields( + request_type=retriever_service.QueryCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert 
"query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryCorpusResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.QueryCorpusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.query_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_query_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.query_corpus._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "query", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, 
False]) +def test_query_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_query_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.QueryCorpusRequest.pb( + retriever_service.QueryCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.QueryCorpusResponse.to_json( + retriever_service.QueryCorpusResponse() + ) + + request = retriever_service.QueryCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.QueryCorpusResponse() + + client.query_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_query_corpus_rest_bad_request( + transport: str = "rest", request_type=retriever_service.QueryCorpusRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.query_corpus(request) + + +def test_query_corpus_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request_init["document"] = { + "name": "name_value", + "display_name": "display_name_value", + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del 
request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_document_rest_required_fields( + request_type=retriever_service.CreateDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + 
"parent", + "document", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_create_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.CreateDocumentRequest.pb( + retriever_service.CreateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Document.to_json(retriever.Document()) + + request = retriever_service.CreateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Document() + + client.create_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=retriever_service.CreateDocumentRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_document(request) + + +def test_create_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=corpora/*}/documents" % client.transport._host, args[1] + ) + + +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + retriever_service.CreateDocumentRequest(), + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + +def test_create_document_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetDocumentRequest, + dict, + ], +) +def test_get_document_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_document_rest_required_fields( + request_type=retriever_service.GetDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = 
RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.GetDocumentRequest.pb( + retriever_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Document.to_json(retriever.Document()) + + request = retriever_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=retriever_service.GetDocumentRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=corpora/*/documents/*}" % client.transport._host, args[1] + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + retriever_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateDocumentRequest, + dict, + ], +) +def test_update_document_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"document": {"name": "corpora/sample1/documents/sample2"}} + request_init["document"] = { + "name": "corpora/sample1/documents/sample2", + "display_name": "display_name_value", + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.UpdateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del 
request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_document_rest_required_fields( + request_type=retriever_service.UpdateDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "document", + 
"updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_update_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.UpdateDocumentRequest.pb( + retriever_service.UpdateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Document.to_json(retriever.Document()) + + request = retriever_service.UpdateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Document() + + client.update_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=retriever_service.UpdateDocumentRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"document": {"name": "corpora/sample1/documents/sample2"}} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_document(request) + + +def test_update_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {"document": {"name": "corpora/sample1/documents/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{document.name=corpora/*/documents/*}" % client.transport._host, + args[1], + ) + + +def test_update_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_document( + retriever_service.UpdateDocumentRequest(), + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_document_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_document(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_document_rest_required_fields( + request_type=retriever_service.DeleteDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_delete_document" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.DeleteDocumentRequest.pb( + retriever_service.DeleteDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = retriever_service.DeleteDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=retriever_service.DeleteDocumentRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_document(request) + + +def test_delete_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=corpora/*/documents/*}" % client.transport._host, args[1] + ) + + +def test_delete_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + retriever_service.DeleteDocumentRequest(), + name="name_value", + ) + + +def test_delete_document_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_documents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_documents_rest_required_fields( + request_type=retriever_service.ListDocumentsRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_documents_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_documents_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_documents" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_list_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.ListDocumentsRequest.pb( + retriever_service.ListDocumentsRequest() + ) + transcode.return_value 
= { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.ListDocumentsResponse.to_json( + retriever_service.ListDocumentsResponse() + ) + + request = retriever_service.ListDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.ListDocumentsResponse() + + client.list_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=retriever_service.ListDocumentsRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_documents(request) + + +def test_list_documents_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.ListDocumentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=corpora/*}/documents" % client.transport._host, args[1] + ) + + +def test_list_documents_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + retriever_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + retriever_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "corpora/sample1"} + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryDocumentRequest, + dict, + ], +) +def test_query_document_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryDocumentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.QueryDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.query_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryDocumentResponse) + + +def test_query_document_rest_required_fields( + request_type=retriever_service.QueryDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 
"name_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryDocumentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.QueryDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.query_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_query_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.query_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "query", + ) + ) + ) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_query_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.QueryDocumentRequest.pb( + retriever_service.QueryDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.QueryDocumentResponse.to_json( + retriever_service.QueryDocumentResponse() + ) + + request = retriever_service.QueryDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.QueryDocumentResponse() + + client.query_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_query_document_rest_bad_request( + transport: str = "rest", request_type=retriever_service.QueryDocumentRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": 
"corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.query_document(request) + + +def test_query_document_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateChunkRequest, + dict, + ], +) +def test_create_chunk_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request_init["chunk"] = { + "name": "name_value", + "data": {"string_value": "string_value_value"}, + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.CreateChunkRequest.meta.fields["chunk"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["chunk"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["chunk"][field])): + del request_init["chunk"][field][i][subfield] + else: + del 
request_init["chunk"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_chunk(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_create_chunk_rest_required_fields( + request_type=retriever_service.CreateChunkRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_chunk._get_unset_required_fields({}) + assert 
set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "chunk", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_chunk" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_create_chunk" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.CreateChunkRequest.pb( + retriever_service.CreateChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Chunk.to_json(retriever.Chunk()) + + request = retriever_service.CreateChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Chunk() + + client.create_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_chunk_rest_bad_request( + transport: str = "rest", request_type=retriever_service.CreateChunkRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_chunk(request) + + +def test_create_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=corpora/*/documents/*}/chunks" % client.transport._host, + args[1], + ) + + +def test_create_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_chunk( + retriever_service.CreateChunkRequest(), + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + +def test_create_chunk_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchCreateChunksRequest, + dict, + ], +) +def test_batch_create_chunks_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.BatchCreateChunksResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.BatchCreateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_chunks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchCreateChunksResponse) + + +def test_batch_create_chunks_rest_required_fields( + request_type=retriever_service.BatchCreateChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.BatchCreateChunksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.BatchCreateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_chunks._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("requests",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = 
RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_batch_create_chunks" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_batch_create_chunks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.BatchCreateChunksRequest.pb( + retriever_service.BatchCreateChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.BatchCreateChunksResponse.to_json( + retriever_service.BatchCreateChunksResponse() + ) + + request = retriever_service.BatchCreateChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.BatchCreateChunksResponse() + + client.batch_create_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_chunks_rest_bad_request( + transport: str = "rest", request_type=retriever_service.BatchCreateChunksRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_chunks(request) + + +def test_batch_create_chunks_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetChunkRequest, + dict, + ], +) +def test_get_chunk_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_chunk(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_get_chunk_rest_required_fields(request_type=retriever_service.GetChunkRequest): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_chunk._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_chunk" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_get_chunk" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
retriever_service.GetChunkRequest.pb( + retriever_service.GetChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Chunk.to_json(retriever.Chunk()) + + request = retriever_service.GetChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Chunk() + + client.get_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_chunk_rest_bad_request( + transport: str = "rest", request_type=retriever_service.GetChunkRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_chunk(request) + + +def test_get_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever.Chunk() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=corpora/*/documents/*/chunks/*}" % client.transport._host, + args[1], + ) + + +def test_get_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_chunk( + retriever_service.GetChunkRequest(), + name="name_value", + ) + + +def test_get_chunk_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateChunkRequest, + dict, + ], +) +def test_update_chunk_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "chunk": {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + } + request_init["chunk"] = { + "name": "corpora/sample1/documents/sample2/chunks/sample3", + "data": {"string_value": "string_value_value"}, + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.UpdateChunkRequest.meta.fields["chunk"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["chunk"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["chunk"][field])): + del request_init["chunk"][field][i][subfield] + else: + del 
request_init["chunk"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_chunk(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_update_chunk_rest_required_fields( + request_type=retriever_service.UpdateChunkRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_chunk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_chunk._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "chunk", + "updateMask", + ) + ) + ) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_chunk" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_update_chunk" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.UpdateChunkRequest.pb( + retriever_service.UpdateChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever.Chunk.to_json(retriever.Chunk()) + + request = retriever_service.UpdateChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Chunk() + + client.update_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_chunk_rest_bad_request( + transport: str = "rest", request_type=retriever_service.UpdateChunkRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "chunk": {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + } + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_chunk(request) + + +def test_update_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + + # get arguments that satisfy an http rule for this method + sample_request = { + "chunk": {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{chunk.name=corpora/*/documents/*/chunks/*}" + % client.transport._host, + args[1], + ) + + +def test_update_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_chunk( + retriever_service.UpdateChunkRequest(), + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_chunk_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchUpdateChunksRequest, + dict, + ], +) +def test_batch_update_chunks_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.BatchUpdateChunksResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.BatchUpdateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_update_chunks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchUpdateChunksResponse) + + +def test_batch_update_chunks_rest_required_fields( + request_type=retriever_service.BatchUpdateChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.BatchUpdateChunksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.BatchUpdateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_update_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_update_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_update_chunks._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("requests",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_update_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = 
RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_batch_update_chunks" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_batch_update_chunks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.BatchUpdateChunksRequest.pb( + retriever_service.BatchUpdateChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.BatchUpdateChunksResponse.to_json( + retriever_service.BatchUpdateChunksResponse() + ) + + request = retriever_service.BatchUpdateChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.BatchUpdateChunksResponse() + + client.batch_update_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_update_chunks_rest_bad_request( + transport: str = "rest", request_type=retriever_service.BatchUpdateChunksRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_update_chunks(request) + + +def test_batch_update_chunks_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteChunkRequest, + dict, + ], +) +def test_delete_chunk_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_chunk(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_chunk_rest_required_fields( + request_type=retriever_service.DeleteChunkRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_chunk._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_delete_chunk" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.DeleteChunkRequest.pb( + retriever_service.DeleteChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = retriever_service.DeleteChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + + client.delete_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_chunk_rest_bad_request( + transport: str = "rest", request_type=retriever_service.DeleteChunkRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_chunk(request) + + +def test_delete_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=corpora/*/documents/*/chunks/*}" % client.transport._host, + args[1], + ) + + +def test_delete_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_chunk( + retriever_service.DeleteChunkRequest(), + name="name_value", + ) + + +def test_delete_chunk_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchDeleteChunksRequest, + dict, + ], +) +def test_batch_delete_chunks_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_delete_chunks(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_batch_delete_chunks_rest_required_fields( + request_type=retriever_service.BatchDeleteChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_delete_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_delete_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_delete_chunks._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("requests",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_delete_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_batch_delete_chunks" + ) as pre: + 
pre.assert_not_called() + pb_message = retriever_service.BatchDeleteChunksRequest.pb( + retriever_service.BatchDeleteChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = retriever_service.BatchDeleteChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.batch_delete_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_batch_delete_chunks_rest_bad_request( + transport: str = "rest", request_type=retriever_service.BatchDeleteChunksRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_delete_chunks(request) + + +def test_batch_delete_chunks_rest_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListChunksRequest, + dict, + ], +) +def test_list_chunks_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_chunks(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChunksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_chunks_rest_required_fields( + request_type=retriever_service.ListChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_chunks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListChunksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_chunks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + 
) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_chunks" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_list_chunks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = retriever_service.ListChunksRequest.pb( + retriever_service.ListChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = retriever_service.ListChunksResponse.to_json( + retriever_service.ListChunksResponse() + ) + + request = retriever_service.ListChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.ListChunksResponse() + + client.list_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_chunks_rest_bad_request( + transport: str = "rest", request_type=retriever_service.ListChunksRequest +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_chunks(request) + + +def test_list_chunks_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListChunksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_chunks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=corpora/*/documents/*}/chunks" % client.transport._host, + args[1], + ) + + +def test_list_chunks_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_chunks( + retriever_service.ListChunksRequest(), + parent="parent_value", + ) + + +def test_list_chunks_rest_pager(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + retriever_service.ListChunksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "corpora/sample1/documents/sample2"} + + pager = client.list_chunks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Chunk) for i in results) + + pages = list(client.list_chunks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to 
provide credentials and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RetrieverServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RetrieverServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + transports.RetrieverServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = RetrieverServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RetrieverServiceGrpcTransport, + ) + + +def test_retriever_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RetrieverServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_retriever_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1beta.services.retriever_service.transports.RetrieverServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RetrieverServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_corpus", + "get_corpus", + "update_corpus", + "delete_corpus", + "list_corpora", + "query_corpus", + "create_document", + "get_document", + "update_document", + "delete_document", + "list_documents", + "query_document", + "create_chunk", + "batch_create_chunks", + "get_chunk", + "update_chunk", + "batch_update_chunks", + "delete_chunk", + "batch_delete_chunks", + "list_chunks", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_retriever_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, 
mock.patch( + "google.ai.generativelanguage_v1beta.services.retriever_service.transports.RetrieverServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RetrieverServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_retriever_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.retriever_service.transports.RetrieverServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RetrieverServiceTransport() + adc.assert_called_once() + + +def test_retriever_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RetrieverServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + transports.RetrieverServiceRestTransport, + ], +) +def test_retriever_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RetrieverServiceGrpcTransport, grpc_helpers), + (transports.RetrieverServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_retriever_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_retriever_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RetrieverServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_retriever_service_host_no_port(transport_name): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_retriever_service_host_with_port(transport_name): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_retriever_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RetrieverServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RetrieverServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_corpus._session + session2 = client2.transport.create_corpus._session + assert session1 != session2 + session1 = client1.transport.get_corpus._session + session2 = client2.transport.get_corpus._session + assert session1 != session2 + session1 = client1.transport.update_corpus._session + session2 = client2.transport.update_corpus._session + assert session1 != session2 + session1 = client1.transport.delete_corpus._session + session2 = client2.transport.delete_corpus._session + assert session1 != session2 + session1 = client1.transport.list_corpora._session + session2 = client2.transport.list_corpora._session + assert session1 != session2 + session1 = client1.transport.query_corpus._session + session2 = client2.transport.query_corpus._session + assert session1 != session2 + session1 = client1.transport.create_document._session + session2 = client2.transport.create_document._session + assert session1 != session2 + session1 = client1.transport.get_document._session + session2 = client2.transport.get_document._session + assert session1 != session2 + session1 = client1.transport.update_document._session + session2 = client2.transport.update_document._session + assert session1 != session2 + session1 = client1.transport.delete_document._session + session2 = client2.transport.delete_document._session + assert session1 != session2 + session1 = client1.transport.list_documents._session + session2 = client2.transport.list_documents._session + assert session1 != 
session2 + session1 = client1.transport.query_document._session + session2 = client2.transport.query_document._session + assert session1 != session2 + session1 = client1.transport.create_chunk._session + session2 = client2.transport.create_chunk._session + assert session1 != session2 + session1 = client1.transport.batch_create_chunks._session + session2 = client2.transport.batch_create_chunks._session + assert session1 != session2 + session1 = client1.transport.get_chunk._session + session2 = client2.transport.get_chunk._session + assert session1 != session2 + session1 = client1.transport.update_chunk._session + session2 = client2.transport.update_chunk._session + assert session1 != session2 + session1 = client1.transport.batch_update_chunks._session + session2 = client2.transport.batch_update_chunks._session + assert session1 != session2 + session1 = client1.transport.delete_chunk._session + session2 = client2.transport.delete_chunk._session + assert session1 != session2 + session1 = client1.transport.batch_delete_chunks._session + session2 = client2.transport.batch_delete_chunks._session + assert session1 != session2 + session1 = client1.transport.list_chunks._session + session2 = client2.transport.list_chunks._session + assert session1 != session2 + + +def test_retriever_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.RetrieverServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_retriever_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.RetrieverServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_chunk_path(): + corpus = "squid" + document = "clam" + chunk = "whelk" + expected = "corpora/{corpus}/documents/{document}/chunks/{chunk}".format( + corpus=corpus, + document=document, + chunk=chunk, + ) + actual = RetrieverServiceClient.chunk_path(corpus, document, chunk) + assert expected == actual + + +def test_parse_chunk_path(): + expected = { + "corpus": "octopus", + "document": "oyster", + "chunk": "nudibranch", + } + path = RetrieverServiceClient.chunk_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RetrieverServiceClient.parse_chunk_path(path) + assert expected == actual + + +def test_corpus_path(): + corpus = "cuttlefish" + expected = "corpora/{corpus}".format( + corpus=corpus, + ) + actual = RetrieverServiceClient.corpus_path(corpus) + assert expected == actual + + +def test_parse_corpus_path(): + expected = { + "corpus": "mussel", + } + path = RetrieverServiceClient.corpus_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_corpus_path(path) + assert expected == actual + + +def test_document_path(): + corpus = "winkle" + document = "nautilus" + expected = "corpora/{corpus}/documents/{document}".format( + corpus=corpus, + document=document, + ) + actual = RetrieverServiceClient.document_path(corpus, document) + assert expected == actual + + +def test_parse_document_path(): + expected = { + "corpus": "scallop", + "document": "abalone", + } + path = RetrieverServiceClient.document_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_document_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RetrieverServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RetrieverServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RetrieverServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RetrieverServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RetrieverServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RetrieverServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RetrieverServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RetrieverServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RetrieverServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RetrieverServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RetrieverServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RetrieverServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RetrieverServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RetrieverServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RetrieverServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = RetrieverServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = RetrieverServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport), + (RetrieverServiceAsyncClient, transports.RetrieverServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py new file mode 100644 index 000000000000..17dbc3d8a503 --- /dev/null +++ 
b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py @@ -0,0 +1,3544 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ai.generativelanguage_v1beta.services.text_service import ( + TextServiceAsyncClient, + TextServiceClient, + transports, +) +from google.ai.generativelanguage_v1beta.types import safety, text_service + + +def 
client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TextServiceClient._get_default_mtls_endpoint(None) is None + assert ( + TextServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert TextServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextServiceClient, "grpc"), + (TextServiceAsyncClient, "grpc_asyncio"), + (TextServiceClient, "rest"), + ], +) +def test_text_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
"generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TextServiceGrpcTransport, "grpc"), + (transports.TextServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TextServiceRestTransport, "rest"), + ], +) +def test_text_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextServiceClient, "grpc"), + (TextServiceAsyncClient, "grpc_asyncio"), + (TextServiceClient, "rest"), + ], +) +def test_text_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in 
["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_text_service_client_get_transport_class(): + transport = TextServiceClient.get_transport_class() + available_transports = [ + transports.TextServiceGrpcTransport, + transports.TextServiceRestTransport, + ] + assert transport in available_transports + + transport = TextServiceClient.get_transport_class("grpc") + assert transport == transports.TextServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +def test_text_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TextServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TextServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", "true"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (TextServiceClient, 
transports.TextServiceGrpcTransport, "grpc", "false"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest", "true"), + (TextServiceClient, transports.TextServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_text_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [TextServiceClient, TextServiceAsyncClient]) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +def test_text_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest"), + ], +) +def test_text_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", grpc_helpers), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest", None), + ], +) +def test_text_service_client_client_options_credentials_file( + client_class, transport_class, 
transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.text_service.transports.TextServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", grpc_helpers), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.GenerateTextRequest, + dict, + ], +) +def test_generate_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + response = client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +def test_generate_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + client.generate_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest() + + +@pytest.mark.asyncio +async def test_generate_text_async( + transport: str = "grpc_asyncio", request_type=text_service.GenerateTextRequest +): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + response = await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +@pytest.mark.asyncio +async def test_generate_text_async_from_dict(): + await test_generate_text_async(request_type=dict) + + +def test_generate_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.GenerateTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = text_service.GenerateTextResponse() + client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.GenerateTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_text( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + arg = args[0].max_output_tokens + mock_val = 1865 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +def test_generate_text_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.asyncio +async def test_generate_text_flattened_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.generate_text( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + arg = args[0].max_output_tokens + mock_val = 1865 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.EmbedTextRequest, + dict, + ], +) +def test_embed_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.EmbedTextResponse() + response = client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +def test_embed_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + client.embed_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest() + + +@pytest.mark.asyncio +async def test_embed_text_async( + transport: str = "grpc_asyncio", request_type=text_service.EmbedTextRequest +): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + response = await client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +@pytest.mark.asyncio +async def test_embed_text_async_from_dict(): + await test_embed_text_async(request_type=dict) + + +def test_embed_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.EmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + call.return_value = text_service.EmbedTextResponse() + client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_embed_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.EmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + await client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_embed_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.EmbedTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.embed_text( + model="model_value", + text="text_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].text + mock_val = "text_value" + assert arg == mock_val + + +def test_embed_text_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +@pytest.mark.asyncio +async def test_embed_text_flattened_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = text_service.EmbedTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.embed_text( + model="model_value", + text="text_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].text + mock_val = "text_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_embed_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.BatchEmbedTextRequest, + dict, + ], +) +def test_batch_embed_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.BatchEmbedTextResponse() + response = client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.BatchEmbedTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.BatchEmbedTextResponse) + + +def test_batch_embed_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + client.batch_embed_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.BatchEmbedTextRequest() + + +@pytest.mark.asyncio +async def test_batch_embed_text_async( + transport: str = "grpc_asyncio", request_type=text_service.BatchEmbedTextRequest +): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + response = await client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.BatchEmbedTextRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, text_service.BatchEmbedTextResponse) + + +@pytest.mark.asyncio +async def test_batch_embed_text_async_from_dict(): + await test_batch_embed_text_async(request_type=dict) + + +def test_batch_embed_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.BatchEmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + call.return_value = text_service.BatchEmbedTextResponse() + client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_embed_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.BatchEmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + await client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_batch_embed_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.BatchEmbedTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_embed_text( + model="model_value", + texts=["texts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].texts + mock_val = ["texts_value"] + assert arg == mock_val + + +def test_batch_embed_text_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_text( + text_service.BatchEmbedTextRequest(), + model="model_value", + texts=["texts_value"], + ) + + +@pytest.mark.asyncio +async def test_batch_embed_text_flattened_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = text_service.BatchEmbedTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_embed_text( + model="model_value", + texts=["texts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].texts + mock_val = ["texts_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_embed_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_embed_text( + text_service.BatchEmbedTextRequest(), + model="model_value", + texts=["texts_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.CountTextTokensRequest, + dict, + ], +) +def test_count_text_tokens(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.CountTextTokensResponse( + token_count=1193, + ) + response = client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.CountTextTokensRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.CountTextTokensResponse) + assert response.token_count == 1193 + + +def test_count_text_tokens_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + client.count_text_tokens() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.CountTextTokensRequest() + + +@pytest.mark.asyncio +async def test_count_text_tokens_async( + transport: str = "grpc_asyncio", request_type=text_service.CountTextTokensRequest +): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse( + token_count=1193, + ) + ) + response = await client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.CountTextTokensRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, text_service.CountTextTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.asyncio +async def test_count_text_tokens_async_from_dict(): + await test_count_text_tokens_async(request_type=dict) + + +def test_count_text_tokens_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.CountTextTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + call.return_value = text_service.CountTextTokensResponse() + client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_text_tokens_field_headers_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.CountTextTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse() + ) + await client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_text_tokens_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.CountTextTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_text_tokens( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + + +def test_count_text_tokens_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_text_tokens( + text_service.CountTextTokensRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + +@pytest.mark.asyncio +async def test_count_text_tokens_flattened_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.CountTextTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_text_tokens( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_text_tokens_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.count_text_tokens( + text_service.CountTextTokensRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.GenerateTextRequest, + dict, + ], +) +def test_generate_text_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = text_service.GenerateTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +def test_generate_text_rest_required_fields( + request_type=text_service.GenerateTextRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = text_service.GenerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_text_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_text_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = 
TextServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_generate_text" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_generate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = text_service.GenerateTextRequest.pb( + text_service.GenerateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = text_service.GenerateTextResponse.to_json( + text_service.GenerateTextResponse() + ) + + request = text_service.GenerateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.GenerateTextResponse() + + client.generate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_text_rest_bad_request( + transport: str = "rest", request_type=text_service.GenerateTextRequest +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_text(request) + + +def test_generate_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.GenerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.generate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:generateText" % client.transport._host, args[1] + ) + + +def test_generate_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +def test_generate_text_rest_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.EmbedTextRequest, + dict, + ], +) +def test_embed_text_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = text_service.EmbedTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.embed_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +def test_embed_text_rest_required_fields(request_type=text_service.EmbedTextRequest): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = text_service.EmbedTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.embed_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_embed_text_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.embed_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("model",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_embed_text_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_embed_text" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_embed_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = text_service.EmbedTextRequest.pb(text_service.EmbedTextRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = text_service.EmbedTextResponse.to_json( + text_service.EmbedTextResponse() + ) + + request = text_service.EmbedTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.EmbedTextResponse() + + client.embed_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_embed_text_rest_bad_request( + transport: str = "rest", request_type=text_service.EmbedTextRequest +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.embed_text(request) + + +def test_embed_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.EmbedTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + text="text_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.embed_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:embedText" % client.transport._host, args[1] + ) + + +def test_embed_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +def test_embed_text_rest_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.BatchEmbedTextRequest, + dict, + ], +) +def test_batch_embed_text_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.BatchEmbedTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.BatchEmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_embed_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, text_service.BatchEmbedTextResponse) + + +def test_batch_embed_text_rest_required_fields( + request_type=text_service.BatchEmbedTextRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.BatchEmbedTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.BatchEmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_embed_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_embed_text_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_embed_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("model",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_embed_text_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_batch_embed_text" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_batch_embed_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = text_service.BatchEmbedTextRequest.pb( + text_service.BatchEmbedTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = text_service.BatchEmbedTextResponse.to_json( + text_service.BatchEmbedTextResponse() + ) + + request = text_service.BatchEmbedTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.BatchEmbedTextResponse() + + client.batch_embed_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_embed_text_rest_bad_request( + transport: str = "rest", request_type=text_service.BatchEmbedTextRequest +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_embed_text(request) + + +def test_batch_embed_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = text_service.BatchEmbedTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + texts=["texts_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.BatchEmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_embed_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:batchEmbedText" % client.transport._host, + args[1], + ) + + +def test_batch_embed_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_embed_text( + text_service.BatchEmbedTextRequest(), + model="model_value", + texts=["texts_value"], + ) + + +def test_batch_embed_text_rest_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.CountTextTokensRequest, + dict, + ], +) +def test_count_text_tokens_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.CountTextTokensResponse( + token_count=1193, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.CountTextTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.count_text_tokens(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, text_service.CountTextTokensResponse) + assert response.token_count == 1193 + + +def test_count_text_tokens_rest_required_fields( + request_type=text_service.CountTextTokensRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_text_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_text_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.CountTextTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.CountTextTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.count_text_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_text_tokens_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_text_tokens._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_text_tokens_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_count_text_tokens" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_count_text_tokens" + ) 
as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = text_service.CountTextTokensRequest.pb( + text_service.CountTextTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = text_service.CountTextTokensResponse.to_json( + text_service.CountTextTokensResponse() + ) + + request = text_service.CountTextTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.CountTextTokensResponse() + + client.count_text_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_count_text_tokens_rest_bad_request( + transport: str = "rest", request_type=text_service.CountTextTokensRequest +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.count_text_tokens(request) + + +def test_count_text_tokens_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.CountTextTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.CountTextTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.count_text_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{model=models/*}:countTextTokens" % client.transport._host, + args[1], + ) + + +def test_count_text_tokens_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_text_tokens( + text_service.CountTextTokensRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + +def test_count_text_tokens_rest_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + transports.TextServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TextServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextServiceGrpcTransport, + ) + + +def test_text_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.ai.generativelanguage_v1beta.services.text_service.transports.TextServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "generate_text", + "embed_text", + "batch_embed_text", + "count_text_tokens", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1beta.services.text_service.transports.TextServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_text_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1beta.services.text_service.transports.TextServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextServiceTransport() + adc.assert_called_once() + + +def test_text_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + ], +) +def test_text_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + transports.TextServiceRestTransport, + ], +) +def test_text_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextServiceGrpcTransport, grpc_helpers), + (transports.TextServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_text_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_text_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TextServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_text_service_host_no_port(transport_name): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_text_service_host_with_port(transport_name): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + 
+ +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_text_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TextServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TextServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_text._session + session2 = client2.transport.generate_text._session + assert session1 != session2 + session1 = client1.transport.embed_text._session + session2 = client2.transport.embed_text._session + assert session1 != session2 + session1 = client1.transport.batch_embed_text._session + session2 = client2.transport.batch_embed_text._session + assert session1 != session2 + session1 = client1.transport.count_text_tokens._session + session2 = client2.transport.count_text_tokens._session + assert session1 != session2 + + +def test_text_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TextServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_text_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.TextServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = TextServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = TextServiceClient.model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TextServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TextServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = TextServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TextServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = TextServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TextServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = TextServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TextServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = TextServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = TextServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = TextServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TextServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TextServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TextServiceClient, transports.TextServiceGrpcTransport), + (TextServiceAsyncClient, transports.TextServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-analytics-admin/CHANGELOG.md b/packages/google-analytics-admin/CHANGELOG.md index 90bd379d4fff..94ca3d58d785 100644 --- a/packages/google-analytics-admin/CHANGELOG.md +++ b/packages/google-analytics-admin/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.22.2](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.22.1...google-analytics-admin-v0.22.2) (2024-01-04) + + +### Features + +* **v1alpha:** Add `GetSubpropertyEventFilter`, `ListSubpropertyEventFilters` methods to the Admin API v1 alpha ([fd30dff](https://github.com/googleapis/google-cloud-python/commit/fd30dff92a6e1523699e0f7340029e3187c42944)) +* **v1alpha:** Add the `default_conversion_value` field to the `ConversionEvent` type ([fd30dff](https://github.com/googleapis/google-cloud-python/commit/fd30dff92a6e1523699e0f7340029e3187c42944)) + + 
+### Documentation + +* **v1alpha:** Update the documentation for `grouping_rule`, `system_defined` fields of the `ChannelGroup` type ([fd30dff](https://github.com/googleapis/google-cloud-python/commit/fd30dff92a6e1523699e0f7340029e3187c42944)) +* **v1alpha:** Update the documentation for the `RunAccessReport` method ([fd30dff](https://github.com/googleapis/google-cloud-python/commit/fd30dff92a6e1523699e0f7340029e3187c42944)) + ## [0.22.1](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.22.0...google-analytics-admin-v0.22.1) (2023-12-07) diff --git a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py index 4fa9382283eb..d033d9aa4041 100644 --- a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.22.1" # {x-release-please-version} +__version__ = "0.22.2" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json index abe5b8e2f2c0..b8fa8c8d120d 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json @@ -410,6 +410,11 @@ "get_search_ads360_link" ] }, + "GetSubpropertyEventFilter": { + "methods": [ + "get_subproperty_event_filter" + ] + }, "ListAccessBindings": { "methods": [ "list_access_bindings" @@ -525,6 +530,11 @@ "list_search_ads360_links" ] }, + "ListSubpropertyEventFilters": { + "methods": [ + "list_subproperty_event_filters" + ] + }, "ProvisionAccountTicket": { "methods": [ "provision_account_ticket" @@ -649,6 +659,11 @@ "methods": [ "update_search_ads360_link" ] + }, + "UpdateSubpropertyEventFilter": { + "methods": [ + "update_subproperty_event_filter" + ] } } }, @@ -1055,6 +1070,11 @@ "get_search_ads360_link" ] }, + "GetSubpropertyEventFilter": { + "methods": [ + "get_subproperty_event_filter" + ] + }, "ListAccessBindings": { "methods": [ "list_access_bindings" @@ -1170,6 +1190,11 @@ "list_search_ads360_links" ] }, + "ListSubpropertyEventFilters": { + "methods": [ + "list_subproperty_event_filters" + ] + }, "ProvisionAccountTicket": { "methods": [ "provision_account_ticket" @@ -1294,6 +1319,11 @@ "methods": [ "update_search_ads360_link" ] + }, + "UpdateSubpropertyEventFilter": { + "methods": [ + "update_subproperty_event_filter" + ] } } }, @@ -1700,6 +1730,11 @@ "get_search_ads360_link" ] }, + "GetSubpropertyEventFilter": { + "methods": [ + "get_subproperty_event_filter" + ] + }, "ListAccessBindings": { "methods": [ "list_access_bindings" @@ -1815,6 +1850,11 @@ "list_search_ads360_links" ] }, + "ListSubpropertyEventFilters": { + "methods": [ + 
"list_subproperty_event_filters" + ] + }, "ProvisionAccountTicket": { "methods": [ "provision_account_ticket" @@ -1939,6 +1979,11 @@ "methods": [ "update_search_ads360_link" ] + }, + "UpdateSubpropertyEventFilter": { + "methods": [ + "update_subproperty_event_filter" + ] } } } diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py index 4fa9382283eb..d033d9aa4041 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.22.1" # {x-release-please-version} +__version__ = "0.22.2" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py index f3531ea55cd0..a0ee8f05ecae 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py @@ -6920,9 +6920,10 @@ async def run_access_report( provides records of each time a user reads Google Analytics reporting data. Access records are retained for up to 2 years. - Data Access Reports can be requested for a property. The - property must be in Google Analytics 360. This method is only - available to Administrators. + Data Access Reports can be requested for a property. Reports may + be requested for any property, but dimensions that aren't + related to quota can only be requested on Google Analytics 360 + properties. This method is only available to Administrators. 
These data access records include GA4 UI Reporting, GA4 UI Explorations, GA4 Data API, and other products like Firebase & @@ -10523,26 +10524,117 @@ async def create_subproperty( # Done; return the response. return response - async def delete_subproperty_event_filter( + async def create_subproperty_event_filter( self, request: Optional[ - Union[analytics_admin.DeleteSubpropertyEventFilterRequest, dict] + Union[analytics_admin.CreateSubpropertyEventFilterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + subproperty_event_filter: Optional[ + gaa_subproperty_event_filter.SubpropertyEventFilter + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: + r"""Creates a subproperty Event Filter. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.CreateSubpropertyEventFilterRequest, dict]]): + The request object. Request message for + CreateSubpropertyEventFilter RPC. + parent (:class:`str`): + Required. The ordinary property for which to create a + subproperty event filter. Format: properties/property_id + Example: properties/123 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subproperty_event_filter (:class:`google.analytics.admin_v1alpha.types.SubpropertyEventFilter`): + Required. The subproperty event + filter to create. + + This corresponds to the ``subproperty_event_filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.admin_v1alpha.types.SubpropertyEventFilter: + A resource message representing a GA4 + Subproperty event filter. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, subproperty_event_filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_admin.CreateSubpropertyEventFilterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if subproperty_event_filter is not None: + request.subproperty_event_filter = subproperty_event_filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_subproperty_event_filter, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_subproperty_event_filter( + self, + request: Optional[ + Union[analytics_admin.GetSubpropertyEventFilterRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a subproperty event filter. 
+ ) -> subproperty_event_filter.SubpropertyEventFilter: + r"""Lookup for a single subproperty Event Filter. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteSubpropertyEventFilterRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.GetSubpropertyEventFilterRequest, dict]]): The request object. Request message for - DeleteSubpropertyEventFilter RPC. + GetSubpropertyEventFilter RPC. name (:class:`str`): Required. Resource name of the subproperty event filter - to delete. Format: + to lookup. Format: properties/property_id/subpropertyEventFilters/subproperty_event_filter Example: properties/123/subpropertyEventFilters/456 @@ -10554,6 +10646,12 @@ async def delete_subproperty_event_filter( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.SubpropertyEventFilter: + A resource message representing a GA4 + Subproperty event filter. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -10565,7 +10663,7 @@ async def delete_subproperty_event_filter( "the individual field arguments should be set." ) - request = analytics_admin.DeleteSubpropertyEventFilterRequest(request) + request = analytics_admin.GetSubpropertyEventFilterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -10575,7 +10673,7 @@ async def delete_subproperty_event_filter( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_subproperty_event_filter, + self._client._transport.get_subproperty_event_filter, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -10587,48 +10685,143 @@ async def delete_subproperty_event_filter( ) # Send the request. 
- await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def create_subproperty_event_filter( + # Done; return the response. + return response + + async def list_subproperty_event_filters( self, request: Optional[ - Union[analytics_admin.CreateSubpropertyEventFilterRequest, dict] + Union[analytics_admin.ListSubpropertyEventFiltersRequest, dict] ] = None, *, parent: Optional[str] = None, - subproperty_event_filter: Optional[ - gaa_subproperty_event_filter.SubpropertyEventFilter - ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: - r"""Creates a subproperty Event Filter. + ) -> pagers.ListSubpropertyEventFiltersAsyncPager: + r"""List all subproperty Event Filters on a property. Args: - request (Optional[Union[google.analytics.admin_v1alpha.types.CreateSubpropertyEventFilterRequest, dict]]): + request (Optional[Union[google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersRequest, dict]]): The request object. Request message for - CreateSubpropertyEventFilter RPC. + ListSubpropertyEventFilters RPC. parent (:class:`str`): - Required. The ordinary property for which to create a - subproperty event filter. Format: properties/property_id - Example: properties/123 + Required. Resource name of the ordinary property. + Format: properties/property_id Example: properties/123 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListSubpropertyEventFiltersAsyncPager: + Response message for + ListSubpropertyEventFilter RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_admin.ListSubpropertyEventFiltersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_subproperty_event_filters, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubpropertyEventFiltersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_subproperty_event_filter( + self, + request: Optional[ + Union[analytics_admin.UpdateSubpropertyEventFilterRequest, dict] + ] = None, + *, + subproperty_event_filter: Optional[ + gaa_subproperty_event_filter.SubpropertyEventFilter + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: + r"""Updates a subproperty Event Filter. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.UpdateSubpropertyEventFilterRequest, dict]]): + The request object. Request message for + UpdateSubpropertyEventFilter RPC. subproperty_event_filter (:class:`google.analytics.admin_v1alpha.types.SubpropertyEventFilter`): Required. The subproperty event - filter to create. + filter to update. This corresponds to the ``subproperty_event_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Field names must + be in snake case (for example, "field_to_update"). + Omitted fields will not be updated. To replace the + entire entity, use one path with the string "*" to match + all fields. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -10644,26 +10837,26 @@ async def create_subproperty_event_filter( # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, subproperty_event_filter]) + has_flattened_params = any([subproperty_event_filter, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = analytics_admin.CreateSubpropertyEventFilterRequest(request) + request = analytics_admin.UpdateSubpropertyEventFilterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent if subproperty_event_filter is not None: request.subproperty_event_filter = subproperty_event_filter + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_subproperty_event_filter, + self._client._transport.update_subproperty_event_filter, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -10671,7 +10864,14 @@ async def create_subproperty_event_filter( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "subproperty_event_filter.name", + request.subproperty_event_filter.name, + ), + ) + ), ) # Send the request. @@ -10685,6 +10885,77 @@ async def create_subproperty_event_filter( # Done; return the response. return response + async def delete_subproperty_event_filter( + self, + request: Optional[ + Union[analytics_admin.DeleteSubpropertyEventFilterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a subproperty event filter. 
+ + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.DeleteSubpropertyEventFilterRequest, dict]]): + The request object. Request message for + DeleteSubpropertyEventFilter RPC. + name (:class:`str`): + Required. Resource name of the subproperty event filter + to delete. Format: + properties/property_id/subpropertyEventFilters/subproperty_event_filter + Example: properties/123/subpropertyEventFilters/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_admin.DeleteSubpropertyEventFilterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_subproperty_event_filter, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "AnalyticsAdminServiceAsyncClient": return self diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py index fa4ece4882dc..ff0651842f16 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py @@ -7676,9 +7676,10 @@ def run_access_report( provides records of each time a user reads Google Analytics reporting data. Access records are retained for up to 2 years. - Data Access Reports can be requested for a property. The - property must be in Google Analytics 360. This method is only - available to Administrators. + Data Access Reports can be requested for a property. Reports may + be requested for any property, but dimensions that aren't + related to quota can only be requested on Google Analytics 360 + properties. This method is only available to Administrators. These data access records include GA4 UI Reporting, GA4 UI Explorations, GA4 Data API, and other products like Firebase & @@ -11342,26 +11343,119 @@ def create_subproperty( # Done; return the response. 
return response - def delete_subproperty_event_filter( + def create_subproperty_event_filter( self, request: Optional[ - Union[analytics_admin.DeleteSubpropertyEventFilterRequest, dict] + Union[analytics_admin.CreateSubpropertyEventFilterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + subproperty_event_filter: Optional[ + gaa_subproperty_event_filter.SubpropertyEventFilter + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: + r"""Creates a subproperty Event Filter. + + Args: + request (Union[google.analytics.admin_v1alpha.types.CreateSubpropertyEventFilterRequest, dict]): + The request object. Request message for + CreateSubpropertyEventFilter RPC. + parent (str): + Required. The ordinary property for which to create a + subproperty event filter. Format: properties/property_id + Example: properties/123 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subproperty_event_filter (google.analytics.admin_v1alpha.types.SubpropertyEventFilter): + Required. The subproperty event + filter to create. + + This corresponds to the ``subproperty_event_filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.SubpropertyEventFilter: + A resource message representing a GA4 + Subproperty event filter. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, subproperty_event_filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_admin.CreateSubpropertyEventFilterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_admin.CreateSubpropertyEventFilterRequest): + request = analytics_admin.CreateSubpropertyEventFilterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if subproperty_event_filter is not None: + request.subproperty_event_filter = subproperty_event_filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_subproperty_event_filter + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_subproperty_event_filter( + self, + request: Optional[ + Union[analytics_admin.GetSubpropertyEventFilterRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a subproperty event filter. 
+ ) -> subproperty_event_filter.SubpropertyEventFilter: + r"""Lookup for a single subproperty Event Filter. Args: - request (Union[google.analytics.admin_v1alpha.types.DeleteSubpropertyEventFilterRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.GetSubpropertyEventFilterRequest, dict]): The request object. Request message for - DeleteSubpropertyEventFilter RPC. + GetSubpropertyEventFilter RPC. name (str): Required. Resource name of the subproperty event filter - to delete. Format: + to lookup. Format: properties/property_id/subpropertyEventFilters/subproperty_event_filter Example: properties/123/subpropertyEventFilters/456 @@ -11373,6 +11467,12 @@ def delete_subproperty_event_filter( timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.types.SubpropertyEventFilter: + A resource message representing a GA4 + Subproperty event filter. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -11385,11 +11485,11 @@ def delete_subproperty_event_filter( ) # Minor optimization to avoid making a copy if the user passes - # in a analytics_admin.DeleteSubpropertyEventFilterRequest. + # in a analytics_admin.GetSubpropertyEventFilterRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, analytics_admin.DeleteSubpropertyEventFilterRequest): - request = analytics_admin.DeleteSubpropertyEventFilterRequest(request) + if not isinstance(request, analytics_admin.GetSubpropertyEventFilterRequest): + request = analytics_admin.GetSubpropertyEventFilterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if name is not None: @@ -11398,7 +11498,7 @@ def delete_subproperty_event_filter( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.delete_subproperty_event_filter + self._transport.get_subproperty_event_filter ] # Certain fields should be provided within the metadata header; @@ -11408,48 +11508,145 @@ def delete_subproperty_event_filter( ) # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def create_subproperty_event_filter( + # Done; return the response. + return response + + def list_subproperty_event_filters( self, request: Optional[ - Union[analytics_admin.CreateSubpropertyEventFilterRequest, dict] + Union[analytics_admin.ListSubpropertyEventFiltersRequest, dict] ] = None, *, parent: Optional[str] = None, - subproperty_event_filter: Optional[ - gaa_subproperty_event_filter.SubpropertyEventFilter - ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: - r"""Creates a subproperty Event Filter. + ) -> pagers.ListSubpropertyEventFiltersPager: + r"""List all subproperty Event Filters on a property. Args: - request (Union[google.analytics.admin_v1alpha.types.CreateSubpropertyEventFilterRequest, dict]): + request (Union[google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersRequest, dict]): The request object. Request message for - CreateSubpropertyEventFilter RPC. + ListSubpropertyEventFilters RPC. parent (str): - Required. The ordinary property for which to create a - subproperty event filter. Format: properties/property_id - Example: properties/123 + Required. Resource name of the ordinary property. 
+ Format: properties/property_id Example: properties/123 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.admin_v1alpha.services.analytics_admin_service.pagers.ListSubpropertyEventFiltersPager: + Response message for + ListSubpropertyEventFilter RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_admin.ListSubpropertyEventFiltersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_admin.ListSubpropertyEventFiltersRequest): + request = analytics_admin.ListSubpropertyEventFiltersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_subproperty_event_filters + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubpropertyEventFiltersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_subproperty_event_filter( + self, + request: Optional[ + Union[analytics_admin.UpdateSubpropertyEventFilterRequest, dict] + ] = None, + *, + subproperty_event_filter: Optional[ + gaa_subproperty_event_filter.SubpropertyEventFilter + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: + r"""Updates a subproperty Event Filter. + + Args: + request (Union[google.analytics.admin_v1alpha.types.UpdateSubpropertyEventFilterRequest, dict]): + The request object. Request message for + UpdateSubpropertyEventFilter RPC. subproperty_event_filter (google.analytics.admin_v1alpha.types.SubpropertyEventFilter): Required. The subproperty event - filter to create. + filter to update. This corresponds to the ``subproperty_event_filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Field names must + be in snake case (for example, "field_to_update"). + Omitted fields will not be updated. To replace the + entire entity, use one path with the string "*" to match + all fields. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -11465,7 +11662,7 @@ def create_subproperty_event_filter( # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, subproperty_event_filter]) + has_flattened_params = any([subproperty_event_filter, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -11473,28 +11670,35 @@ def create_subproperty_event_filter( ) # Minor optimization to avoid making a copy if the user passes - # in a analytics_admin.CreateSubpropertyEventFilterRequest. + # in a analytics_admin.UpdateSubpropertyEventFilterRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, analytics_admin.CreateSubpropertyEventFilterRequest): - request = analytics_admin.CreateSubpropertyEventFilterRequest(request) + if not isinstance(request, analytics_admin.UpdateSubpropertyEventFilterRequest): + request = analytics_admin.UpdateSubpropertyEventFilterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent if subproperty_event_filter is not None: request.subproperty_event_filter = subproperty_event_filter + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._transport._wrapped_methods[ - self._transport.create_subproperty_event_filter + self._transport.update_subproperty_event_filter ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "subproperty_event_filter.name", + request.subproperty_event_filter.name, + ), + ) + ), ) # Send the request. @@ -11508,6 +11712,79 @@ def create_subproperty_event_filter( # Done; return the response. return response + def delete_subproperty_event_filter( + self, + request: Optional[ + Union[analytics_admin.DeleteSubpropertyEventFilterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a subproperty event filter. + + Args: + request (Union[google.analytics.admin_v1alpha.types.DeleteSubpropertyEventFilterRequest, dict]): + The request object. Request message for + DeleteSubpropertyEventFilter RPC. + name (str): + Required. Resource name of the subproperty event filter + to delete. Format: + properties/property_id/subpropertyEventFilters/subproperty_event_filter + Example: properties/123/subpropertyEventFilters/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_admin.DeleteSubpropertyEventFilterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_admin.DeleteSubpropertyEventFilterRequest): + request = analytics_admin.DeleteSubpropertyEventFilterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_subproperty_event_filter + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def __enter__(self) -> "AnalyticsAdminServiceClient": return self diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py index 38075c2e8cac..80d3af3d7dd7 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/pagers.py @@ -31,6 +31,7 @@ event_create_and_edit, expanded_data_set, resources, + subproperty_event_filter, ) @@ -3044,3 +3045,137 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubpropertyEventFiltersPager: + """A pager for iterating through ``list_subproperty_event_filters`` requests. + + This class thinly wraps an initial + :class:`google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subproperty_event_filters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubpropertyEventFilters`` requests and continue to iterate + through the ``subproperty_event_filters`` field on the + corresponding responses. + + All the usual :class:`google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., analytics_admin.ListSubpropertyEventFiltersResponse], + request: analytics_admin.ListSubpropertyEventFiltersRequest, + response: analytics_admin.ListSubpropertyEventFiltersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersRequest): + The initial request object. + response (google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analytics_admin.ListSubpropertyEventFiltersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_admin.ListSubpropertyEventFiltersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[subproperty_event_filter.SubpropertyEventFilter]: + for page in self.pages: + yield from page.subproperty_event_filters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubpropertyEventFiltersAsyncPager: + """A pager for iterating through ``list_subproperty_event_filters`` requests. + + This class thinly wraps an initial + :class:`google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subproperty_event_filters`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubpropertyEventFilters`` requests and continue to iterate + through the ``subproperty_event_filters`` field on the + corresponding responses. + + All the usual :class:`google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[analytics_admin.ListSubpropertyEventFiltersResponse] + ], + request: analytics_admin.ListSubpropertyEventFiltersRequest, + response: analytics_admin.ListSubpropertyEventFiltersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersRequest): + The initial request object. + response (google.analytics.admin_v1alpha.types.ListSubpropertyEventFiltersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_admin.ListSubpropertyEventFiltersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[analytics_admin.ListSubpropertyEventFiltersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[subproperty_event_filter.SubpropertyEventFilter]: + async def async_generator(): + async for page in self.pages: + for response in page.subproperty_event_filters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py index 13fa58516e04..58d4c04382d5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py @@ -40,6 +40,7 @@ from google.analytics.admin_v1alpha.types import event_create_and_edit from google.analytics.admin_v1alpha.types import expanded_data_set from google.analytics.admin_v1alpha.types import resources +from google.analytics.admin_v1alpha.types import subproperty_event_filter DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -771,13 +772,28 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - 
self.delete_subproperty_event_filter: gapic_v1.method.wrap_method( - self.delete_subproperty_event_filter, + self.create_subproperty_event_filter: gapic_v1.method.wrap_method( + self.create_subproperty_event_filter, default_timeout=None, client_info=client_info, ), - self.create_subproperty_event_filter: gapic_v1.method.wrap_method( - self.create_subproperty_event_filter, + self.get_subproperty_event_filter: gapic_v1.method.wrap_method( + self.get_subproperty_event_filter, + default_timeout=None, + client_info=client_info, + ), + self.list_subproperty_event_filters: gapic_v1.method.wrap_method( + self.list_subproperty_event_filters, + default_timeout=None, + client_info=client_info, + ), + self.update_subproperty_event_filter: gapic_v1.method.wrap_method( + self.update_subproperty_event_filter, + default_timeout=None, + client_info=client_info, + ), + self.delete_subproperty_event_filter: gapic_v1.method.wrap_method( + self.delete_subproperty_event_filter, default_timeout=None, client_info=client_info, ), @@ -2124,19 +2140,46 @@ def create_subproperty( raise NotImplementedError() @property - def delete_subproperty_event_filter( + def create_subproperty_event_filter( self, ) -> Callable[ - [analytics_admin.DeleteSubpropertyEventFilterRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + [analytics_admin.CreateSubpropertyEventFilterRequest], + Union[ + gaa_subproperty_event_filter.SubpropertyEventFilter, + Awaitable[gaa_subproperty_event_filter.SubpropertyEventFilter], + ], ]: raise NotImplementedError() @property - def create_subproperty_event_filter( + def get_subproperty_event_filter( self, ) -> Callable[ - [analytics_admin.CreateSubpropertyEventFilterRequest], + [analytics_admin.GetSubpropertyEventFilterRequest], + Union[ + subproperty_event_filter.SubpropertyEventFilter, + Awaitable[subproperty_event_filter.SubpropertyEventFilter], + ], + ]: + raise NotImplementedError() + + @property + def list_subproperty_event_filters( + self, + ) -> Callable[ + 
[analytics_admin.ListSubpropertyEventFiltersRequest], + Union[ + analytics_admin.ListSubpropertyEventFiltersResponse, + Awaitable[analytics_admin.ListSubpropertyEventFiltersResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.UpdateSubpropertyEventFilterRequest], Union[ gaa_subproperty_event_filter.SubpropertyEventFilter, Awaitable[gaa_subproperty_event_filter.SubpropertyEventFilter], @@ -2144,6 +2187,15 @@ def create_subproperty_event_filter( ]: raise NotImplementedError() + @property + def delete_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.DeleteSubpropertyEventFilterRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py index 489eeaaeaa4c..1494ca25ee66 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py @@ -37,6 +37,7 @@ from google.analytics.admin_v1alpha.types import event_create_and_edit from google.analytics.admin_v1alpha.types import expanded_data_set from google.analytics.admin_v1alpha.types import resources +from google.analytics.admin_v1alpha.types import subproperty_event_filter from .base import DEFAULT_CLIENT_INFO, AnalyticsAdminServiceTransport @@ -2601,9 +2602,10 @@ def run_access_report( provides records of each time a user reads Google Analytics reporting data. Access records are retained for up to 2 years. - Data Access Reports can be requested for a property. 
The - property must be in Google Analytics 360. This method is only - available to Administrators. + Data Access Reports can be requested for a property. Reports may + be requested for any property, but dimensions that aren't + related to quota can only be requested on Google Analytics 360 + properties. This method is only available to Administrators. These data access records include GA4 UI Reporting, GA4 UI Explorations, GA4 Data API, and other products like Firebase & @@ -3990,19 +3992,20 @@ def create_subproperty( return self._stubs["create_subproperty"] @property - def delete_subproperty_event_filter( + def create_subproperty_event_filter( self, ) -> Callable[ - [analytics_admin.DeleteSubpropertyEventFilterRequest], empty_pb2.Empty + [analytics_admin.CreateSubpropertyEventFilterRequest], + gaa_subproperty_event_filter.SubpropertyEventFilter, ]: - r"""Return a callable for the delete subproperty event + r"""Return a callable for the create subproperty event filter method over gRPC. - Deletes a subproperty event filter. + Creates a subproperty Event Filter. Returns: - Callable[[~.DeleteSubpropertyEventFilterRequest], - ~.Empty]: + Callable[[~.CreateSubpropertyEventFilterRequest], + ~.SubpropertyEventFilter]: A function that, when called, will call the underlying RPC on the server. """ @@ -4010,30 +4013,90 @@ def delete_subproperty_event_filter( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_subproperty_event_filter" not in self._stubs: + if "create_subproperty_event_filter" not in self._stubs: self._stubs[ - "delete_subproperty_event_filter" + "create_subproperty_event_filter" ] = self.grpc_channel.unary_unary( - "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteSubpropertyEventFilter", - request_serializer=analytics_admin.DeleteSubpropertyEventFilterRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateSubpropertyEventFilter", + request_serializer=analytics_admin.CreateSubpropertyEventFilterRequest.serialize, + response_deserializer=gaa_subproperty_event_filter.SubpropertyEventFilter.deserialize, ) - return self._stubs["delete_subproperty_event_filter"] + return self._stubs["create_subproperty_event_filter"] @property - def create_subproperty_event_filter( + def get_subproperty_event_filter( self, ) -> Callable[ - [analytics_admin.CreateSubpropertyEventFilterRequest], + [analytics_admin.GetSubpropertyEventFilterRequest], + subproperty_event_filter.SubpropertyEventFilter, + ]: + r"""Return a callable for the get subproperty event filter method over gRPC. + + Lookup for a single subproperty Event Filter. + + Returns: + Callable[[~.GetSubpropertyEventFilterRequest], + ~.SubpropertyEventFilter]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_subproperty_event_filter" not in self._stubs: + self._stubs["get_subproperty_event_filter"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetSubpropertyEventFilter", + request_serializer=analytics_admin.GetSubpropertyEventFilterRequest.serialize, + response_deserializer=subproperty_event_filter.SubpropertyEventFilter.deserialize, + ) + return self._stubs["get_subproperty_event_filter"] + + @property + def list_subproperty_event_filters( + self, + ) -> Callable[ + [analytics_admin.ListSubpropertyEventFiltersRequest], + analytics_admin.ListSubpropertyEventFiltersResponse, + ]: + r"""Return a callable for the list subproperty event filters method over gRPC. + + List all subproperty Event Filters on a property. + + Returns: + Callable[[~.ListSubpropertyEventFiltersRequest], + ~.ListSubpropertyEventFiltersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subproperty_event_filters" not in self._stubs: + self._stubs[ + "list_subproperty_event_filters" + ] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ListSubpropertyEventFilters", + request_serializer=analytics_admin.ListSubpropertyEventFiltersRequest.serialize, + response_deserializer=analytics_admin.ListSubpropertyEventFiltersResponse.deserialize, + ) + return self._stubs["list_subproperty_event_filters"] + + @property + def update_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.UpdateSubpropertyEventFilterRequest], gaa_subproperty_event_filter.SubpropertyEventFilter, ]: - r"""Return a callable for the create subproperty event + r"""Return a callable for the update subproperty event filter method over gRPC. - Creates a subproperty Event Filter. 
+ Updates a subproperty Event Filter. Returns: - Callable[[~.CreateSubpropertyEventFilterRequest], + Callable[[~.UpdateSubpropertyEventFilterRequest], ~.SubpropertyEventFilter]: A function that, when called, will call the underlying RPC on the server. @@ -4042,15 +4105,46 @@ def create_subproperty_event_filter( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_subproperty_event_filter" not in self._stubs: + if "update_subproperty_event_filter" not in self._stubs: self._stubs[ - "create_subproperty_event_filter" + "update_subproperty_event_filter" ] = self.grpc_channel.unary_unary( - "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateSubpropertyEventFilter", - request_serializer=analytics_admin.CreateSubpropertyEventFilterRequest.serialize, + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateSubpropertyEventFilter", + request_serializer=analytics_admin.UpdateSubpropertyEventFilterRequest.serialize, response_deserializer=gaa_subproperty_event_filter.SubpropertyEventFilter.deserialize, ) - return self._stubs["create_subproperty_event_filter"] + return self._stubs["update_subproperty_event_filter"] + + @property + def delete_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.DeleteSubpropertyEventFilterRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete subproperty event + filter method over gRPC. + + Deletes a subproperty event filter. + + Returns: + Callable[[~.DeleteSubpropertyEventFilterRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_subproperty_event_filter" not in self._stubs: + self._stubs[ + "delete_subproperty_event_filter" + ] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteSubpropertyEventFilter", + request_serializer=analytics_admin.DeleteSubpropertyEventFilterRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_subproperty_event_filter"] def close(self): self.grpc_channel.close() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py index a4aa1b4e8cda..36cdeb8608c4 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py @@ -37,6 +37,7 @@ from google.analytics.admin_v1alpha.types import event_create_and_edit from google.analytics.admin_v1alpha.types import expanded_data_set from google.analytics.admin_v1alpha.types import resources +from google.analytics.admin_v1alpha.types import subproperty_event_filter from .base import DEFAULT_CLIENT_INFO, AnalyticsAdminServiceTransport from .grpc import AnalyticsAdminServiceGrpcTransport @@ -2661,9 +2662,10 @@ def run_access_report( provides records of each time a user reads Google Analytics reporting data. Access records are retained for up to 2 years. - Data Access Reports can be requested for a property. The - property must be in Google Analytics 360. This method is only - available to Administrators. + Data Access Reports can be requested for a property. Reports may + be requested for any property, but dimensions that aren't + related to quota can only be requested on Google Analytics 360 + properties. 
This method is only available to Administrators. These data access records include GA4 UI Reporting, GA4 UI Explorations, GA4 Data API, and other products like Firebase & @@ -4078,20 +4080,20 @@ def create_subproperty( return self._stubs["create_subproperty"] @property - def delete_subproperty_event_filter( + def create_subproperty_event_filter( self, ) -> Callable[ - [analytics_admin.DeleteSubpropertyEventFilterRequest], - Awaitable[empty_pb2.Empty], + [analytics_admin.CreateSubpropertyEventFilterRequest], + Awaitable[gaa_subproperty_event_filter.SubpropertyEventFilter], ]: - r"""Return a callable for the delete subproperty event + r"""Return a callable for the create subproperty event filter method over gRPC. - Deletes a subproperty event filter. + Creates a subproperty Event Filter. Returns: - Callable[[~.DeleteSubpropertyEventFilterRequest], - Awaitable[~.Empty]]: + Callable[[~.CreateSubpropertyEventFilterRequest], + Awaitable[~.SubpropertyEventFilter]]: A function that, when called, will call the underlying RPC on the server. """ @@ -4099,30 +4101,90 @@ def delete_subproperty_event_filter( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_subproperty_event_filter" not in self._stubs: + if "create_subproperty_event_filter" not in self._stubs: self._stubs[ - "delete_subproperty_event_filter" + "create_subproperty_event_filter" ] = self.grpc_channel.unary_unary( - "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteSubpropertyEventFilter", - request_serializer=analytics_admin.DeleteSubpropertyEventFilterRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateSubpropertyEventFilter", + request_serializer=analytics_admin.CreateSubpropertyEventFilterRequest.serialize, + response_deserializer=gaa_subproperty_event_filter.SubpropertyEventFilter.deserialize, ) - return self._stubs["delete_subproperty_event_filter"] + return self._stubs["create_subproperty_event_filter"] @property - def create_subproperty_event_filter( + def get_subproperty_event_filter( self, ) -> Callable[ - [analytics_admin.CreateSubpropertyEventFilterRequest], + [analytics_admin.GetSubpropertyEventFilterRequest], + Awaitable[subproperty_event_filter.SubpropertyEventFilter], + ]: + r"""Return a callable for the get subproperty event filter method over gRPC. + + Lookup for a single subproperty Event Filter. + + Returns: + Callable[[~.GetSubpropertyEventFilterRequest], + Awaitable[~.SubpropertyEventFilter]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_subproperty_event_filter" not in self._stubs: + self._stubs["get_subproperty_event_filter"] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetSubpropertyEventFilter", + request_serializer=analytics_admin.GetSubpropertyEventFilterRequest.serialize, + response_deserializer=subproperty_event_filter.SubpropertyEventFilter.deserialize, + ) + return self._stubs["get_subproperty_event_filter"] + + @property + def list_subproperty_event_filters( + self, + ) -> Callable[ + [analytics_admin.ListSubpropertyEventFiltersRequest], + Awaitable[analytics_admin.ListSubpropertyEventFiltersResponse], + ]: + r"""Return a callable for the list subproperty event filters method over gRPC. + + List all subproperty Event Filters on a property. + + Returns: + Callable[[~.ListSubpropertyEventFiltersRequest], + Awaitable[~.ListSubpropertyEventFiltersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subproperty_event_filters" not in self._stubs: + self._stubs[ + "list_subproperty_event_filters" + ] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/ListSubpropertyEventFilters", + request_serializer=analytics_admin.ListSubpropertyEventFiltersRequest.serialize, + response_deserializer=analytics_admin.ListSubpropertyEventFiltersResponse.deserialize, + ) + return self._stubs["list_subproperty_event_filters"] + + @property + def update_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.UpdateSubpropertyEventFilterRequest], Awaitable[gaa_subproperty_event_filter.SubpropertyEventFilter], ]: - r"""Return a callable for the create subproperty event + r"""Return a callable for the update subproperty event filter method over gRPC. 
- Creates a subproperty Event Filter. + Updates a subproperty Event Filter. Returns: - Callable[[~.CreateSubpropertyEventFilterRequest], + Callable[[~.UpdateSubpropertyEventFilterRequest], Awaitable[~.SubpropertyEventFilter]]: A function that, when called, will call the underlying RPC on the server. @@ -4131,15 +4193,47 @@ def create_subproperty_event_filter( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_subproperty_event_filter" not in self._stubs: + if "update_subproperty_event_filter" not in self._stubs: self._stubs[ - "create_subproperty_event_filter" + "update_subproperty_event_filter" ] = self.grpc_channel.unary_unary( - "/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateSubpropertyEventFilter", - request_serializer=analytics_admin.CreateSubpropertyEventFilterRequest.serialize, + "/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateSubpropertyEventFilter", + request_serializer=analytics_admin.UpdateSubpropertyEventFilterRequest.serialize, response_deserializer=gaa_subproperty_event_filter.SubpropertyEventFilter.deserialize, ) - return self._stubs["create_subproperty_event_filter"] + return self._stubs["update_subproperty_event_filter"] + + @property + def delete_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.DeleteSubpropertyEventFilterRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete subproperty event + filter method over gRPC. + + Deletes a subproperty event filter. + + Returns: + Callable[[~.DeleteSubpropertyEventFilterRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_subproperty_event_filter" not in self._stubs: + self._stubs[ + "delete_subproperty_event_filter" + ] = self.grpc_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteSubpropertyEventFilter", + request_serializer=analytics_admin.DeleteSubpropertyEventFilterRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_subproperty_event_filter"] def close(self): return self.grpc_channel.close() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py index 91c4fe8b17fe..c860ca6a60b2 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py @@ -52,6 +52,7 @@ from google.analytics.admin_v1alpha.types import event_create_and_edit from google.analytics.admin_v1alpha.types import expanded_data_set from google.analytics.admin_v1alpha.types import resources +from google.analytics.admin_v1alpha.types import subproperty_event_filter from .base import AnalyticsAdminServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -630,6 +631,14 @@ def post_get_sk_ad_network_conversion_value_schema(self, response): logging.log(f"Received response: {response}") return response + def pre_get_subproperty_event_filter(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_subproperty_event_filter(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_access_bindings(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -814,6 +823,14 @@ def 
post_list_sk_ad_network_conversion_value_schemas(self, response): logging.log(f"Received response: {response}") return response + def pre_list_subproperty_event_filters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_subproperty_event_filters(self, response): + logging.log(f"Received response: {response}") + return response + def pre_provision_account_ticket(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1014,6 +1031,14 @@ def post_update_sk_ad_network_conversion_value_schema(self, response): logging.log(f"Received response: {response}") return response + def pre_update_subproperty_event_filter(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_subproperty_event_filter(self, response): + logging.log(f"Received response: {response}") + return response + transport = AnalyticsAdminServiceRestTransport(interceptor=MyCustomAnalyticsAdminServiceInterceptor()) client = AnalyticsAdminServiceClient(transport=transport) @@ -2697,6 +2722,31 @@ def post_get_sk_ad_network_conversion_value_schema( """ return response + def pre_get_subproperty_event_filter( + self, + request: analytics_admin.GetSubpropertyEventFilterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_admin.GetSubpropertyEventFilterRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_subproperty_event_filter + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. 
+ """ + return request, metadata + + def post_get_subproperty_event_filter( + self, response: subproperty_event_filter.SubpropertyEventFilter + ) -> subproperty_event_filter.SubpropertyEventFilter: + """Post-rpc interceptor for get_subproperty_event_filter + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + def pre_list_access_bindings( self, request: analytics_admin.ListAccessBindingsRequest, @@ -3240,6 +3290,31 @@ def post_list_sk_ad_network_conversion_value_schemas( """ return response + def pre_list_subproperty_event_filters( + self, + request: analytics_admin.ListSubpropertyEventFiltersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_admin.ListSubpropertyEventFiltersRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_subproperty_event_filters + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_list_subproperty_event_filters( + self, response: analytics_admin.ListSubpropertyEventFiltersResponse + ) -> analytics_admin.ListSubpropertyEventFiltersResponse: + """Post-rpc interceptor for list_subproperty_event_filters + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. 
+ """ + return response + def pre_provision_account_ticket( self, request: analytics_admin.ProvisionAccountTicketRequest, @@ -3840,6 +3915,31 @@ def post_update_sk_ad_network_conversion_value_schema( """ return response + def pre_update_subproperty_event_filter( + self, + request: analytics_admin.UpdateSubpropertyEventFilterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_admin.UpdateSubpropertyEventFilterRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_subproperty_event_filter + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_update_subproperty_event_filter( + self, response: gaa_subproperty_event_filter.SubpropertyEventFilter + ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: + """Post-rpc interceptor for update_subproperty_event_filter + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class AnalyticsAdminServiceRestStub: @@ -11433,6 +11533,98 @@ def __call__( ) return resp + class _GetSubpropertyEventFilter(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("GetSubpropertyEventFilter") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.GetSubpropertyEventFilterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> subproperty_event_filter.SubpropertyEventFilter: + r"""Call the get subproperty event + filter method over HTTP. 
+ + Args: + request (~.analytics_admin.GetSubpropertyEventFilterRequest): + The request object. Request message for + GetSubpropertyEventFilter RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.subproperty_event_filter.SubpropertyEventFilter: + A resource message representing a GA4 + Subproperty event filter. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/subpropertyEventFilters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_subproperty_event_filter( + request, metadata + ) + pb_request = analytics_admin.GetSubpropertyEventFilterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = subproperty_event_filter.SubpropertyEventFilter() + pb_resp = subproperty_event_filter.SubpropertyEventFilter.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_subproperty_event_filter(resp) + return resp + class _ListAccessBindings(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("ListAccessBindings") @@ -13543,6 +13735,98 @@ def __call__( ) return resp + class _ListSubpropertyEventFilters(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("ListSubpropertyEventFilters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.ListSubpropertyEventFiltersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_admin.ListSubpropertyEventFiltersResponse: + r"""Call the list subproperty event + filters method over HTTP. + + Args: + request (~.analytics_admin.ListSubpropertyEventFiltersRequest): + The request object. Request message for + ListSubpropertyEventFilters RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_admin.ListSubpropertyEventFiltersResponse: + Response message for + ListSubpropertyEventFilter RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=properties/*}/subpropertyEventFilters", + }, + ] + request, metadata = self._interceptor.pre_list_subproperty_event_filters( + request, metadata + ) + pb_request = analytics_admin.ListSubpropertyEventFiltersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_admin.ListSubpropertyEventFiltersResponse() + pb_resp = analytics_admin.ListSubpropertyEventFiltersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_subproperty_event_filters(resp) + return resp + class _ProvisionAccountTicket(AnalyticsAdminServiceRestStub): def __hash__(self): return hash("ProvisionAccountTicket") @@ -16121,6 +16405,109 @@ def __call__( ) return resp + class _UpdateSubpropertyEventFilter(AnalyticsAdminServiceRestStub): + def __hash__(self): + return hash("UpdateSubpropertyEventFilter") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_admin.UpdateSubpropertyEventFilterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: + r"""Call the update subproperty event + filter method over HTTP. + + Args: + request (~.analytics_admin.UpdateSubpropertyEventFilterRequest): + The request object. Request message for + UpdateSubpropertyEventFilter RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gaa_subproperty_event_filter.SubpropertyEventFilter: + A resource message representing a GA4 + Subproperty event filter. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{subproperty_event_filter.name=properties/*/subpropertyEventFilters/*}", + "body": "subproperty_event_filter", + }, + ] + request, metadata = self._interceptor.pre_update_subproperty_event_filter( + request, metadata + ) + pb_request = analytics_admin.UpdateSubpropertyEventFilterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gaa_subproperty_event_filter.SubpropertyEventFilter() + pb_resp = gaa_subproperty_event_filter.SubpropertyEventFilter.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_subproperty_event_filter(resp) + return resp + @property def acknowledge_user_data_collection( self, @@ -16885,6 +17272,17 @@ def get_sk_ad_network_conversion_value_schema( # In C++ this would require a dynamic_cast return self._GetSKAdNetworkConversionValueSchema(self._session, self._host, self._interceptor) # type: ignore + @property + def get_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.GetSubpropertyEventFilterRequest], + subproperty_event_filter.SubpropertyEventFilter, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSubpropertyEventFilter(self._session, self._host, self._interceptor) # type: ignore + @property def list_access_bindings( self, @@ -17135,6 +17533,17 @@ def list_sk_ad_network_conversion_value_schemas( # In C++ this would require a dynamic_cast return self._ListSKAdNetworkConversionValueSchemas(self._session, self._host, self._interceptor) # type: ignore + @property + def list_subproperty_event_filters( + self, + ) -> Callable[ + [analytics_admin.ListSubpropertyEventFiltersRequest], + analytics_admin.ListSubpropertyEventFiltersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSubpropertyEventFilters(self._session, self._host, self._interceptor) # type: ignore + @property def provision_account_ticket( self, @@ -17389,6 +17798,17 @@ def update_sk_ad_network_conversion_value_schema( # In C++ this would require a dynamic_cast return self._UpdateSKAdNetworkConversionValueSchema(self._session, self._host, self._interceptor) # type: ignore + @property + def update_subproperty_event_filter( + self, + ) -> Callable[ + [analytics_admin.UpdateSubpropertyEventFilterRequest], + gaa_subproperty_event_filter.SubpropertyEventFilter, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSubpropertyEventFilter(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py index 69a85cc56cda..262c69b630b4 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/channel_group.py @@ -262,10 +262,12 @@ class ChannelGroup(proto.Message): length of 256 characters. grouping_rule (MutableSequence[google.analytics.admin_v1alpha.types.GroupingRule]): Required. The grouping rules of channels. - Maximum number of rules is 25. + Maximum number of rules is 50. system_defined (bool): - Output only. Default Channel Group defined by - Google, which cannot be updated. + Output only. If true, then this channel group + is the Default Channel Group predefined by + Google Analytics. Display name and grouping + rules cannot be updated for this channel group. 
""" name: str = proto.Field( diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py index 6c4f8194f134..39dd6c5b0ecd 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py @@ -2009,6 +2009,8 @@ class LinkProposalStatusDetails(proto.Message): class ConversionEvent(proto.Message): r"""A conversion event in a Google Analytics property. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. Resource name of this conversion event. Format: @@ -2037,6 +2039,11 @@ class ConversionEvent(proto.Message): Optional. The method by which conversions will be counted across multiple events within a session. If this value is not provided, it will be set to ``ONCE_PER_EVENT``. + default_conversion_value (google.analytics.admin_v1alpha.types.ConversionEvent.DefaultConversionValue): + Optional. Defines a default value/currency + for a conversion event. + + This field is a member of `oneof`_ ``_default_conversion_value``. """ class ConversionCountingMethod(proto.Enum): @@ -2057,6 +2064,40 @@ class ConversionCountingMethod(proto.Enum): ONCE_PER_EVENT = 1 ONCE_PER_SESSION = 2 + class DefaultConversionValue(proto.Message): + r"""Defines a default value/currency for a conversion event. Both + value and currency must be provided. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (float): + This value will be used to populate the value for all + conversions of the specified event_name where the event + "value" parameter is unset. + + This field is a member of `oneof`_ ``_value``. 
+ currency_code (str): + When a conversion event for this event_name has no set + currency, this currency will be applied as the default. Must + be in ISO 4217 currency code format. See + https://en.wikipedia.org/wiki/ISO_4217 for more. + + This field is a member of `oneof`_ ``_currency_code``. + """ + + value: float = proto.Field( + proto.DOUBLE, + number=1, + optional=True, + ) + currency_code: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -2083,6 +2124,12 @@ class ConversionCountingMethod(proto.Enum): number=6, enum=ConversionCountingMethod, ) + default_conversion_value: DefaultConversionValue = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=DefaultConversionValue, + ) class GoogleSignalsSettings(proto.Message): @@ -2499,46 +2546,6 @@ class ReportingAttributionModel(proto.Enum): value to the last channel that the customer clicked through (or engaged view through for YouTube) before converting. Previously CROSS_CHANNEL_LAST_CLICK - PAID_AND_ORGANIC_CHANNELS_FIRST_CLICK (3): - Starting in June 2023, new properties can no longer use this - model. See `Analytics - Help `__ - for more details. Starting in September 2023, we will sunset - this model for all properties. - - Gives all credit for the conversion to the first channel - that a customer clicked (or engaged view through for - YouTube) before converting. Previously - CROSS_CHANNEL_FIRST_CLICK - PAID_AND_ORGANIC_CHANNELS_LINEAR (4): - Starting in June 2023, new properties can no longer use this - model. See `Analytics - Help `__ - for more details. Starting in September 2023, we will sunset - this model for all properties. - - Distributes the credit for the conversion equally across all - the channels a customer clicked (or engaged view through for - YouTube) before converting. 
Previously CROSS_CHANNEL_LINEAR - PAID_AND_ORGANIC_CHANNELS_POSITION_BASED (5): - Starting in June 2023, new properties can no longer use this - model. See `Analytics - Help `__ - for more details. Starting in September 2023, we will sunset - this model for all properties. - - Attributes 40% credit to the first and last interaction, and - the remaining 20% credit is distributed evenly to the middle - interactions. Previously CROSS_CHANNEL_POSITION_BASED - PAID_AND_ORGANIC_CHANNELS_TIME_DECAY (6): - Starting in June 2023, new properties can no longer use this - model. See `Analytics - Help `__ - for more details. Starting in September 2023, we will sunset - this model for all properties. - - Gives more credit to the touchpoints that happened closer in - time to the conversion. Previously CROSS_CHANNEL_TIME_DECAY GOOGLE_PAID_CHANNELS_LAST_CLICK (7): Attributes 100% of the conversion value to the last Google Paid channel that the customer clicked through before @@ -2547,10 +2554,6 @@ class ReportingAttributionModel(proto.Enum): REPORTING_ATTRIBUTION_MODEL_UNSPECIFIED = 0 PAID_AND_ORGANIC_CHANNELS_DATA_DRIVEN = 1 PAID_AND_ORGANIC_CHANNELS_LAST_CLICK = 2 - PAID_AND_ORGANIC_CHANNELS_FIRST_CLICK = 3 - PAID_AND_ORGANIC_CHANNELS_LINEAR = 4 - PAID_AND_ORGANIC_CHANNELS_POSITION_BASED = 5 - PAID_AND_ORGANIC_CHANNELS_TIME_DECAY = 6 GOOGLE_PAID_CHANNELS_LAST_CLICK = 7 class AdsWebConversionDataExportScope(proto.Enum): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py index 4fa9382283eb..d033d9aa4041 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.22.1" # {x-release-please-version} +__version__ = "0.22.2" # {x-release-please-version} diff --git a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json index 1c6077a3c960..d64aea226c1f 100644 --- a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json +++ b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-admin", - "version": "0.22.1" + "version": "0.22.2" }, "snippets": [ { diff --git a/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py b/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py index 992fcdbeece6..c20f5d0376d3 100644 --- a/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py +++ b/packages/google-analytics-admin/scripts/fixup_admin_v1alpha_keywords.py @@ -119,6 +119,7 @@ class adminCallTransformer(cst.CSTTransformer): 'get_rollup_property_source_link': ('name', ), 'get_search_ads360_link': ('name', ), 'get_sk_ad_network_conversion_value_schema': ('name', ), + 'get_subproperty_event_filter': ('name', ), 'list_access_bindings': ('parent', 'page_size', 'page_token', ), 'list_accounts': ('page_size', 'page_token', 'show_deleted', ), 'list_account_summaries': ('page_size', 'page_token', ), @@ -142,6 +143,7 @@ class adminCallTransformer(cst.CSTTransformer): 'list_rollup_property_source_links': ('parent', 'page_size', 'page_token', ), 'list_search_ads360_links': ('parent', 'page_size', 'page_token', ), 'list_sk_ad_network_conversion_value_schemas': ('parent', 'page_size', 'page_token', ), + 'list_subproperty_event_filters': ('parent', 'page_size', 'page_token', ), 'provision_account_ticket': ('account', 'redirect_uri', ), 
'run_access_report': ('entity', 'dimensions', 'metrics', 'date_ranges', 'dimension_filter', 'metric_filter', 'offset', 'limit', 'time_zone', 'order_bys', 'return_entity_quota', 'include_all_users', 'expand_groups', ), 'search_change_history_events': ('account', 'property', 'resource_type', 'action', 'actor_email', 'earliest_change_time', 'latest_change_time', 'page_size', 'page_token', ), @@ -167,6 +169,7 @@ class adminCallTransformer(cst.CSTTransformer): 'update_property': ('property', 'update_mask', ), 'update_search_ads360_link': ('update_mask', 'search_ads_360_link', ), 'update_sk_ad_network_conversion_value_schema': ('skadnetwork_conversion_value_schema', 'update_mask', ), + 'update_subproperty_event_filter': ('subproperty_event_filter', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index e88572ff0349..b94edce082dd 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -34902,11 +34902,11 @@ async def test_create_subproperty_async_from_dict(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteSubpropertyEventFilterRequest, + analytics_admin.CreateSubpropertyEventFilterRequest, dict, ], ) -def test_delete_subproperty_event_filter(request_type, transport: str = "grpc"): +def test_create_subproperty_event_filter(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -34918,22 +34918,27 @@ def test_delete_subproperty_event_filter(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.create_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_subproperty_event_filter(request) + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + response = client.create_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" -def test_delete_subproperty_event_filter_empty_call(): +def test_create_subproperty_event_filter_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -34943,18 +34948,18 @@ def test_delete_subproperty_event_filter_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.create_subproperty_event_filter), "__call__" ) as call: - client.delete_subproperty_event_filter() + client.create_subproperty_event_filter() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_async( +async def test_create_subproperty_event_filter_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, + request_type=analytics_admin.CreateSubpropertyEventFilterRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -34967,43 +34972,323 @@ async def test_delete_subproperty_event_filter_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.create_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_subproperty_event_filter(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.create_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_async_from_dict(): - await test_delete_subproperty_event_filter_async(request_type=dict) +async def test_create_subproperty_event_filter_async_from_dict(): + await test_create_subproperty_event_filter_async(request_type=dict) -def test_delete_subproperty_event_filter_field_headers(): +def test_create_subproperty_event_filter_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteSubpropertyEventFilterRequest() + request = analytics_admin.CreateSubpropertyEventFilterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.CreateSubpropertyEventFilterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) + await client.create_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_subproperty_event_filter_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_subproperty_event_filter( + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].subproperty_event_filter + mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ) + assert arg == mock_val + + +def test_create_subproperty_event_filter_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_subproperty_event_filter( + analytics_admin.CreateSubpropertyEventFilterRequest(), + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_subproperty_event_filter( + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].subproperty_event_filter + mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_subproperty_event_filter_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_subproperty_event_filter( + analytics_admin.CreateSubpropertyEventFilterRequest(), + parent="parent_value", + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( + name="name_value" + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetSubpropertyEventFilterRequest, + dict, + ], +) +def test_get_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + response = client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +def test_get_subproperty_event_filter_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + client.get_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) + ) + response = await client.get_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetSubpropertyEventFilterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" + + +@pytest.mark.asyncio +async def test_get_subproperty_event_filter_async_from_dict(): + await test_get_subproperty_event_filter_async(request_type=dict) + + +def test_get_subproperty_event_filter_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetSubpropertyEventFilterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_subproperty_event_filter), "__call__" ) as call: - call.return_value = None - client.delete_subproperty_event_filter(request) + call.return_value = subproperty_event_filter.SubpropertyEventFilter() + client.get_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -35019,23 +35304,25 @@ def test_delete_subproperty_event_filter_field_headers(): @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_field_headers_async(): +async def test_get_subproperty_event_filter_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.DeleteSubpropertyEventFilterRequest() + request = analytics_admin.GetSubpropertyEventFilterRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_subproperty_event_filter), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_subproperty_event_filter(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter() + ) + await client.get_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -35050,20 +35337,20 @@ async def test_delete_subproperty_event_filter_field_headers_async(): ) in kw["metadata"] -def test_delete_subproperty_event_filter_flattened(): +def test_get_subproperty_event_filter_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None + call.return_value = subproperty_event_filter.SubpropertyEventFilter() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_subproperty_event_filter( + client.get_subproperty_event_filter( name="name_value", ) @@ -35076,7 +35363,7 @@ def test_delete_subproperty_event_filter_flattened(): assert arg == mock_val -def test_delete_subproperty_event_filter_flattened_error(): +def test_get_subproperty_event_filter_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35084,29 +35371,31 @@ def test_delete_subproperty_event_filter_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_subproperty_event_filter( - analytics_admin.DeleteSubpropertyEventFilterRequest(), + client.get_subproperty_event_filter( + analytics_admin.GetSubpropertyEventFilterRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_flattened_async(): +async def test_get_subproperty_event_filter_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_subproperty_event_filter), "__call__" + type(client.transport.get_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = subproperty_event_filter.SubpropertyEventFilter() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + subproperty_event_filter.SubpropertyEventFilter() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_subproperty_event_filter( + response = await client.get_subproperty_event_filter( name="name_value", ) @@ -35120,7 +35409,7 @@ async def test_delete_subproperty_event_filter_flattened_async(): @pytest.mark.asyncio -async def test_delete_subproperty_event_filter_flattened_error_async(): +async def test_get_subproperty_event_filter_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35128,8 +35417,8 @@ async def test_delete_subproperty_event_filter_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_subproperty_event_filter( - analytics_admin.DeleteSubpropertyEventFilterRequest(), + await client.get_subproperty_event_filter( + analytics_admin.GetSubpropertyEventFilterRequest(), name="name_value", ) @@ -35137,11 +35426,11 @@ async def test_delete_subproperty_event_filter_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSubpropertyEventFilterRequest, + analytics_admin.ListSubpropertyEventFiltersRequest, dict, ], ) -def test_create_subproperty_event_filter(request_type, transport: str = "grpc"): +def test_list_subproperty_event_filters(request_type, transport: str = "grpc"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -35153,19 +35442,470 @@ def test_create_subproperty_event_filter(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubpropertyEventFiltersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_subproperty_event_filters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + client.list_subproperty_event_filters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.ListSubpropertyEventFiltersRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.ListSubpropertyEventFiltersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubpropertyEventFiltersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_from_dict(): + await test_list_subproperty_event_filters_async(request_type=dict) + + +def test_list_subproperty_event_filters_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListSubpropertyEventFiltersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.ListSubpropertyEventFiltersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse() + ) + await client.list_subproperty_event_filters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_subproperty_event_filters_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_subproperty_event_filters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_subproperty_event_filters_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_subproperty_event_filters( + analytics_admin.ListSubpropertyEventFiltersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_admin.ListSubpropertyEventFiltersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_subproperty_event_filters( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_subproperty_event_filters( + analytics_admin.ListSubpropertyEventFiltersRequest(), + parent="parent_value", + ) + + +def test_list_subproperty_event_filters_pager(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_subproperty_event_filters(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, subproperty_event_filter.SubpropertyEventFilter) + for i in results + ) + + +def test_list_subproperty_event_filters_pages(transport_name: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the 
actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + pages = list(client.list_subproperty_event_filters(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_pager(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_subproperty_event_filters( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, subproperty_event_filter.SubpropertyEventFilter) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_subproperty_event_filters_async_pages(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subproperty_event_filters), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_subproperty_event_filters(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.UpdateSubpropertyEventFilterRequest, + dict, + ], +) +def test_update_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value", apply_to_property="apply_to_property_value", ) - response = client.create_subproperty_event_filter(request) + response = client.update_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() # Establish that the response is the type that we expect. assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) @@ -35173,7 +35913,7 @@ def test_create_subproperty_event_filter(request_type, transport: str = "grpc"): assert response.apply_to_property == "apply_to_property_value" -def test_create_subproperty_event_filter_empty_call(): +def test_update_subproperty_event_filter_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( @@ -35183,18 +35923,18 @@ def test_create_subproperty_event_filter_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: - client.create_subproperty_event_filter() + client.update_subproperty_event_filter() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() @pytest.mark.asyncio -async def test_create_subproperty_event_filter_async( +async def test_update_subproperty_event_filter_async( transport: str = "grpc_asyncio", - request_type=analytics_admin.CreateSubpropertyEventFilterRequest, + request_type=analytics_admin.UpdateSubpropertyEventFilterRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -35207,7 +35947,7 @@ async def test_create_subproperty_event_filter_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -35216,12 +35956,12 @@ async def test_create_subproperty_event_filter_async( apply_to_property="apply_to_property_value", ) ) - response = await client.create_subproperty_event_filter(request) + response = await client.update_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == analytics_admin.CreateSubpropertyEventFilterRequest() + assert args[0] == analytics_admin.UpdateSubpropertyEventFilterRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) @@ -35230,27 +35970,27 @@ async def test_create_subproperty_event_filter_async( @pytest.mark.asyncio -async def test_create_subproperty_event_filter_async_from_dict(): - await test_create_subproperty_event_filter_async(request_type=dict) +async def test_update_subproperty_event_filter_async_from_dict(): + await test_update_subproperty_event_filter_async(request_type=dict) -def test_create_subproperty_event_filter_field_headers(): +def test_update_subproperty_event_filter_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateSubpropertyEventFilterRequest() - request.parent = "parent_value" + request.subproperty_event_filter.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() - client.create_subproperty_event_filter(request) + client.update_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -35261,30 +36001,30 @@ def test_create_subproperty_event_filter_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "subproperty_event_filter.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_subproperty_event_filter_field_headers_async(): +async def test_update_subproperty_event_filter_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_admin.CreateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateSubpropertyEventFilterRequest() - request.parent = "parent_value" + request.subproperty_event_filter.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gaa_subproperty_event_filter.SubpropertyEventFilter() ) - await client.create_subproperty_event_filter(request) + await client.update_subproperty_event_filter(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -35295,45 +36035,45 @@ async def test_create_subproperty_event_filter_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "subproperty_event_filter.name=name_value", ) in kw["metadata"] -def test_create_subproperty_event_filter_flattened(): +def test_update_subproperty_event_filter_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_subproperty_event_filter( - parent="parent_value", + client.update_subproperty_event_filter( subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val arg = args[0].subproperty_event_filter mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ) assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_create_subproperty_event_filter_flattened_error(): +def test_update_subproperty_event_filter_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35341,24 +36081,24 @@ def test_create_subproperty_event_filter_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_subproperty_event_filter( - analytics_admin.CreateSubpropertyEventFilterRequest(), - parent="parent_value", + client.update_subproperty_event_filter( + analytics_admin.UpdateSubpropertyEventFilterRequest(), subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_create_subproperty_event_filter_flattened_async(): +async def test_update_subproperty_event_filter_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_subproperty_event_filter), "__call__" + type(client.transport.update_subproperty_event_filter), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() @@ -35368,29 +36108,29 @@ async def test_create_subproperty_event_filter_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_subproperty_event_filter( - parent="parent_value", + response = await client.update_subproperty_event_filter( subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val arg = args[0].subproperty_event_filter mock_val = gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ) assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio -async def test_create_subproperty_event_filter_flattened_error_async(): +async def test_update_subproperty_event_filter_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -35398,12 +36138,247 @@ async def test_create_subproperty_event_filter_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_subproperty_event_filter( - analytics_admin.CreateSubpropertyEventFilterRequest(), - parent="parent_value", + await client.update_subproperty_event_filter( + analytics_admin.UpdateSubpropertyEventFilterRequest(), subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteSubpropertyEventFilterRequest, + dict, + ], +) +def test_delete_subproperty_event_filter(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subproperty_event_filter_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + client.delete_subproperty_event_filter() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.DeleteSubpropertyEventFilterRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_async_from_dict(): + await test_delete_subproperty_event_filter_async(request_type=dict) + + +def test_delete_subproperty_event_filter_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.DeleteSubpropertyEventFilterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + call.return_value = None + client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_admin.DeleteSubpropertyEventFilterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_subproperty_event_filter(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_subproperty_event_filter_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_subproperty_event_filter( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_subproperty_event_filter_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_subproperty_event_filter( + analytics_admin.DeleteSubpropertyEventFilterRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subproperty_event_filter), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_subproperty_event_filter( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_subproperty_event_filter_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_subproperty_event_filter( + analytics_admin.DeleteSubpropertyEventFilterRequest(), + name="name_value", ) @@ -45835,6 +46810,10 @@ def test_create_conversion_event_rest(request_type): "deletable": True, "custom": True, "counting_method": 1, + "default_conversion_value": { + "value": 0.541, + "currency_code": "currency_code_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -46207,6 +47186,10 @@ def test_update_conversion_event_rest(request_type): "deletable": True, "custom": True, "counting_method": 1, + "default_conversion_value": { + "value": 0.541, + "currency_code": "currency_code_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -68449,45 +69432,497 @@ def test_delete_connected_site_tag_rest_error(): ) -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.ListConnectedSiteTagsRequest, - dict, - ], -) -def test_list_connected_site_tags_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListConnectedSiteTagsRequest, + dict, + ], +) +def test_list_connected_site_tags_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.ListConnectedSiteTagsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.ListConnectedSiteTagsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_connected_site_tags(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connected_site_tags_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_list_connected_site_tags" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_connected_site_tags" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.ListConnectedSiteTagsRequest.pb( + analytics_admin.ListConnectedSiteTagsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.ListConnectedSiteTagsResponse.to_json( + analytics_admin.ListConnectedSiteTagsResponse() + 
) + ) + + request = analytics_admin.ListConnectedSiteTagsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.ListConnectedSiteTagsResponse() + + client.list_connected_site_tags( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_connected_site_tags_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListConnectedSiteTagsRequest +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_connected_site_tags(request) + + +def test_list_connected_site_tags_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.FetchConnectedGa4PropertyRequest, + dict, + ], +) +def test_fetch_connected_ga4_property_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.FetchConnectedGa4PropertyResponse( + property="property_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_connected_ga4_property(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) + assert response.property == "property_value" + + +def test_fetch_connected_ga4_property_rest_required_fields( + request_type=analytics_admin.FetchConnectedGa4PropertyRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["property"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "property" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "property" in jsonified_request + assert jsonified_request["property"] == request_init["property"] + + jsonified_request["property"] = "property_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters 
are not mixing in. + assert not set(unset_fields) - set(("property",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "property" in jsonified_request + assert jsonified_request["property"] == "property_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_connected_ga4_property(request) + + expected_params = [ + ( + "property", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_connected_ga4_property_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_connected_ga4_property._get_unset_required_fields({}) + assert set(unset_fields) == (set(("property",)) & set(("property",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_fetch_connected_ga4_property", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_fetch_connected_ga4_property", + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = analytics_admin.FetchConnectedGa4PropertyRequest.pb( + analytics_admin.FetchConnectedGa4PropertyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.FetchConnectedGa4PropertyResponse.to_json( + analytics_admin.FetchConnectedGa4PropertyResponse() + ) + ) + + request = analytics_admin.FetchConnectedGa4PropertyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + + client.fetch_connected_ga4_property( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_connected_ga4_property_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.FetchConnectedGa4PropertyRequest, +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_connected_ga4_property(request) + + +def test_fetch_connected_ga4_property_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetAdSenseLinkRequest, + dict, + ], +) +def test_get_ad_sense_link_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/adSenseLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AdSenseLink( + name="name_value", + ad_client_code="ad_client_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_ad_sense_link(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.AdSenseLink) + assert response.name == "name_value" + assert response.ad_client_code == "ad_client_code_value" + + +def test_get_ad_sense_link_rest_required_fields( + request_type=analytics_admin.GetAdSenseLinkRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_ad_sense_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_ad_sense_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.AdSenseLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) + response = client.get_ad_sense_link(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListConnectedSiteTagsResponse() + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListConnectedSiteTagsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_connected_site_tags(request) +def test_get_ad_sense_link_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.ListConnectedSiteTagsResponse) + unset_fields = transport.get_ad_sense_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_connected_site_tags_rest_interceptors(null_interceptor): +def test_get_ad_sense_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -68500,14 +69935,14 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_connected_site_tags" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_ad_sense_link" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_connected_site_tags" + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_ad_sense_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListConnectedSiteTagsRequest.pb( - analytics_admin.ListConnectedSiteTagsRequest() + pb_message = analytics_admin.GetAdSenseLinkRequest.pb( + analytics_admin.GetAdSenseLinkRequest() ) transcode.return_value = { "method": "post", @@ -68519,21 +69954,19 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListConnectedSiteTagsResponse.to_json( - analytics_admin.ListConnectedSiteTagsResponse() - ) + req.return_value._content = resources.AdSenseLink.to_json( + resources.AdSenseLink() ) - request = analytics_admin.ListConnectedSiteTagsRequest() + request = analytics_admin.GetAdSenseLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = analytics_admin.ListConnectedSiteTagsResponse() + post.return_value = resources.AdSenseLink() - client.list_connected_site_tags( + client.get_ad_sense_link( request, metadata=[ ("key", "val"), @@ -68545,8 +69978,8 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_connected_site_tags_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListConnectedSiteTagsRequest +def test_get_ad_sense_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetAdSenseLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68554,7 +69987,7 @@ def test_list_connected_site_tags_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "properties/sample1/adSenseLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -68566,10 +69999,66 @@ def test_list_connected_site_tags_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_connected_site_tags(request) + client.get_ad_sense_link(request) -def test_list_connected_site_tags_rest_error(): +def test_get_ad_sense_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.AdSenseLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_ad_sense_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/adSenseLinks/*}" % client.transport._host, + args[1], + ) + + +def test_get_ad_sense_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_ad_sense_link( + analytics_admin.GetAdSenseLinkRequest(), + name="name_value", + ) + + +def test_get_ad_sense_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -68578,52 +70067,123 @@ def test_list_connected_site_tags_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.FetchConnectedGa4PropertyRequest, + analytics_admin.CreateAdSenseLinkRequest, dict, ], ) -def test_fetch_connected_ga4_property_rest(request_type): +def test_create_ad_sense_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} + request_init["adsense_link"] = { + "name": "name_value", + "ad_client_code": "ad_client_code_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateAdSenseLinkRequest.meta.fields["adsense_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["adsense_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["adsense_link"][field])): + del request_init["adsense_link"][field][i][subfield] + else: + del 
request_init["adsense_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.FetchConnectedGa4PropertyResponse( - property="property_value", + return_value = resources.AdSenseLink( + name="name_value", + ad_client_code="ad_client_code_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( - return_value - ) + return_value = resources.AdSenseLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.fetch_connected_ga4_property(request) + response = client.create_ad_sense_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.FetchConnectedGa4PropertyResponse) - assert response.property == "property_value" + assert isinstance(response, resources.AdSenseLink) + assert response.name == "name_value" + assert response.ad_client_code == "ad_client_code_value" -def test_fetch_connected_ga4_property_rest_required_fields( - request_type=analytics_admin.FetchConnectedGa4PropertyRequest, +def test_create_ad_sense_link_rest_required_fields( + request_type=analytics_admin.CreateAdSenseLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["property"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -68635,29 +70195,24 @@ def test_fetch_connected_ga4_property_rest_required_fields( ) # verify fields with default values are dropped - assert "property" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) + ).create_ad_sense_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "property" in jsonified_request - assert jsonified_request["property"] == request_init["property"] - jsonified_request["property"] = "property_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).fetch_connected_ga4_property._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("property",)) + ).create_ad_sense_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "property" in jsonified_request - assert jsonified_request["property"] == "property_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68666,7 +70221,7 @@ def test_fetch_connected_ga4_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + return_value = resources.AdSenseLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -68678,47 +70233,48 @@ def test_fetch_connected_ga4_property_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.FetchConnectedGa4PropertyResponse.pb( - return_value - ) + return_value = resources.AdSenseLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.fetch_connected_ga4_property(request) + response = client.create_ad_sense_link(request) - expected_params = [ - ( - "property", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_fetch_connected_ga4_property_rest_unset_required_fields(): +def test_create_ad_sense_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.fetch_connected_ga4_property._get_unset_required_fields({}) - assert set(unset_fields) == (set(("property",)) & set(("property",))) + unset_fields = transport.create_ad_sense_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "adsenseLink", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): +def test_create_ad_sense_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -68731,16 +70287,14 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_fetch_connected_ga4_property", + transports.AnalyticsAdminServiceRestInterceptor, "post_create_ad_sense_link" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_fetch_connected_ga4_property", + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_ad_sense_link" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.FetchConnectedGa4PropertyRequest.pb( - analytics_admin.FetchConnectedGa4PropertyRequest() + pb_message = analytics_admin.CreateAdSenseLinkRequest.pb( + analytics_admin.CreateAdSenseLinkRequest() ) transcode.return_value = { "method": "post", @@ -68752,21 +70306,19 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): req.return_value = Response() 
req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.FetchConnectedGa4PropertyResponse.to_json( - analytics_admin.FetchConnectedGa4PropertyResponse() - ) + req.return_value._content = resources.AdSenseLink.to_json( + resources.AdSenseLink() ) - request = analytics_admin.FetchConnectedGa4PropertyRequest() + request = analytics_admin.CreateAdSenseLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + post.return_value = resources.AdSenseLink() - client.fetch_connected_ga4_property( + client.create_ad_sense_link( request, metadata=[ ("key", "val"), @@ -68778,9 +70330,8 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): post.assert_called_once() -def test_fetch_connected_ga4_property_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.FetchConnectedGa4PropertyRequest, +def test_create_ad_sense_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateAdSenseLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -68788,7 +70339,7 @@ def test_fetch_connected_ga4_property_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -68800,10 +70351,68 @@ def test_fetch_connected_ga4_property_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.fetch_connected_ga4_property(request) + client.create_ad_sense_link(request) -def test_fetch_connected_ga4_property_rest_error(): +def test_create_ad_sense_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AdSenseLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AdSenseLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_ad_sense_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/adSenseLinks" % client.transport._host, + args[1], + ) + + +def test_create_ad_sense_link_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_ad_sense_link( + analytics_admin.CreateAdSenseLinkRequest(), + parent="parent_value", + adsense_link=resources.AdSenseLink(name="name_value"), + ) + + +def test_create_ad_sense_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -68812,11 +70421,11 @@ def test_fetch_connected_ga4_property_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetAdSenseLinkRequest, + analytics_admin.DeleteAdSenseLinkRequest, dict, ], ) -def test_get_ad_sense_link_rest(request_type): +def test_delete_ad_sense_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -68829,30 +70438,23 @@ def test_get_ad_sense_link_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.AdSenseLink( - name="name_value", - ad_client_code="ad_client_code_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_sense_link(request) + response = client.delete_ad_sense_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.AdSenseLink) - assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert response is None -def test_get_ad_sense_link_rest_required_fields( - request_type=analytics_admin.GetAdSenseLinkRequest, +def test_delete_ad_sense_link_rest_required_fields( + request_type=analytics_admin.DeleteAdSenseLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -68872,7 +70474,7 @@ def test_get_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_sense_link._get_unset_required_fields(jsonified_request) + ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -68881,7 +70483,7 @@ def test_get_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_sense_link._get_unset_required_fields(jsonified_request) + ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -68895,7 +70497,7 @@ def test_get_ad_sense_link_rest_required_fields( request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -68907,39 +70509,36 @@ def test_get_ad_sense_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_sense_link(request) + response = client.delete_ad_sense_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_ad_sense_link_rest_unset_required_fields(): +def test_delete_ad_sense_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_ad_sense_link._get_unset_required_fields({}) + unset_fields = transport.delete_ad_sense_link._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_ad_sense_link_rest_interceptors(null_interceptor): +def test_delete_ad_sense_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -68952,14 +70551,11 @@ def 
test_get_ad_sense_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_ad_sense_link" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_ad_sense_link" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.GetAdSenseLinkRequest.pb( - analytics_admin.GetAdSenseLinkRequest() + pb_message = analytics_admin.DeleteAdSenseLinkRequest.pb( + analytics_admin.DeleteAdSenseLinkRequest() ) transcode.return_value = { "method": "post", @@ -68971,19 +70567,15 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AdSenseLink.to_json( - resources.AdSenseLink() - ) - request = analytics_admin.GetAdSenseLinkRequest() + request = analytics_admin.DeleteAdSenseLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AdSenseLink() - client.get_ad_sense_link( + client.delete_ad_sense_link( request, metadata=[ ("key", "val"), @@ -68992,11 +70584,10 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_ad_sense_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetAdSenseLinkRequest +def test_delete_ad_sense_link_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.DeleteAdSenseLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69016,10 +70607,10 @@ def test_get_ad_sense_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() 
req.return_value = response_value - client.get_ad_sense_link(request) + client.delete_ad_sense_link(request) -def test_get_ad_sense_link_rest_flattened(): +def test_delete_ad_sense_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69028,7 +70619,7 @@ def test_get_ad_sense_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink() + return_value = None # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} @@ -69042,13 +70633,11 @@ def test_get_ad_sense_link_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_ad_sense_link(**mock_args) + client.delete_ad_sense_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -69060,142 +70649,69 @@ def test_get_ad_sense_link_rest_flattened(): ) -def test_get_ad_sense_link_rest_flattened_error(transport: str = "rest"): +def test_delete_ad_sense_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_ad_sense_link( - analytics_admin.GetAdSenseLinkRequest(), - name="name_value", - ) - - -def test_get_ad_sense_link_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.CreateAdSenseLinkRequest, - dict, - ], -) -def test_create_ad_sense_link_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["adsense_link"] = { - "name": "name_value", - "ad_client_code": "ad_client_code_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateAdSenseLinkRequest.meta.fields["adsense_link"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_ad_sense_link( + analytics_admin.DeleteAdSenseLinkRequest(), + name="name_value", + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["adsense_link"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_delete_ad_sense_link_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for 
subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["adsense_link"][field])): - del request_init["adsense_link"][field][i][subfield] - else: - del request_init["adsense_link"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.ListAdSenseLinksRequest, + dict, + ], +) +def test_list_ad_sense_links_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink( - name="name_value", - ad_client_code="ad_client_code_value", + return_value = analytics_admin.ListAdSenseLinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_ad_sense_link(request) + response = client.list_ad_sense_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AdSenseLink) - assert response.name == "name_value" - assert response.ad_client_code == "ad_client_code_value" + assert isinstance(response, pagers.ListAdSenseLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_create_ad_sense_link_rest_required_fields( - request_type=analytics_admin.CreateAdSenseLinkRequest, +def test_list_ad_sense_links_rest_required_fields( + request_type=analytics_admin.ListAdSenseLinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -69215,7 +70731,7 @@ def test_create_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_ad_sense_link._get_unset_required_fields(jsonified_request) + ).list_ad_sense_links._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -69224,7 +70740,14 @@ def test_create_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_ad_sense_link._get_unset_required_fields(jsonified_request) + ).list_ad_sense_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -69238,7 +70761,7 @@ def test_create_ad_sense_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink() + return_value = analytics_admin.ListAdSenseLinksResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -69250,48 +70773,47 @@ def test_create_ad_sense_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_ad_sense_link(request) + response = client.list_ad_sense_links(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_ad_sense_link_rest_unset_required_fields(): +def test_list_ad_sense_links_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_ad_sense_link._get_unset_required_fields({}) + unset_fields = transport.list_ad_sense_links._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "adsenseLink", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_ad_sense_link_rest_interceptors(null_interceptor): +def test_list_ad_sense_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -69304,14 +70826,14 @@ def 
test_create_ad_sense_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_ad_sense_links" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_ad_sense_links" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateAdSenseLinkRequest.pb( - analytics_admin.CreateAdSenseLinkRequest() + pb_message = analytics_admin.ListAdSenseLinksRequest.pb( + analytics_admin.ListAdSenseLinksRequest() ) transcode.return_value = { "method": "post", @@ -69323,19 +70845,19 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.AdSenseLink.to_json( - resources.AdSenseLink() + req.return_value._content = analytics_admin.ListAdSenseLinksResponse.to_json( + analytics_admin.ListAdSenseLinksResponse() ) - request = analytics_admin.CreateAdSenseLinkRequest() + request = analytics_admin.ListAdSenseLinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.AdSenseLink() + post.return_value = analytics_admin.ListAdSenseLinksResponse() - client.create_ad_sense_link( + client.list_ad_sense_links( request, metadata=[ ("key", "val"), @@ -69347,8 +70869,8 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_ad_sense_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateAdSenseLinkRequest +def test_list_ad_sense_links_rest_bad_request( + transport: str = "rest", 
request_type=analytics_admin.ListAdSenseLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69368,10 +70890,10 @@ def test_create_ad_sense_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_ad_sense_link(request) + client.list_ad_sense_links(request) -def test_create_ad_sense_link_rest_flattened(): +def test_list_ad_sense_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69380,7 +70902,7 @@ def test_create_ad_sense_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AdSenseLink() + return_value = analytics_admin.ListAdSenseLinksResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -69388,7 +70910,6 @@ def test_create_ad_sense_link_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) mock_args.update(sample_request) @@ -69396,12 +70917,12 @@ def test_create_ad_sense_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.AdSenseLink.pb(return_value) + return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_ad_sense_link(**mock_args) + client.list_ad_sense_links(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -69413,7 +70934,7 @@ def test_create_ad_sense_link_rest_flattened(): ) -def test_create_ad_sense_link_rest_flattened_error(transport: str = "rest"): +def test_list_ad_sense_links_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -69422,56 +70943,123 @@ def test_create_ad_sense_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_ad_sense_link( - analytics_admin.CreateAdSenseLinkRequest(), + client.list_ad_sense_links( + analytics_admin.ListAdSenseLinksRequest(), parent="parent_value", - adsense_link=resources.AdSenseLink(name="name_value"), ) -def test_create_ad_sense_link_rest_error(): +def test_list_ad_sense_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[], + next_page_token="def", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListAdSenseLinksResponse( + adsense_links=[ + resources.AdSenseLink(), + resources.AdSenseLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListAdSenseLinksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_ad_sense_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AdSenseLink) for i in results) + + pages = list(client.list_ad_sense_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteAdSenseLinkRequest, + analytics_admin.GetEventCreateRuleRequest, dict, ], ) -def test_delete_ad_sense_link_rest(request_type): +def test_get_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/adSenseLinks/sample2"} + 
request_init = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = event_create_and_edit.EventCreateRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_ad_sense_link(request) + response = client.get_event_create_rule(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True -def test_delete_ad_sense_link_rest_required_fields( - request_type=analytics_admin.DeleteAdSenseLinkRequest, +def test_get_event_create_rule_rest_required_fields( + request_type=analytics_admin.GetEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -69491,7 +71079,7 @@ def test_delete_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) + ).get_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -69500,7 +71088,7 @@ def test_delete_ad_sense_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_ad_sense_link._get_unset_required_fields(jsonified_request) + ).get_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -69514,7 +71102,7 @@ def test_delete_ad_sense_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = event_create_and_edit.EventCreateRule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -69526,36 +71114,39 @@ def test_delete_ad_sense_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = event_create_and_edit.EventCreateRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_ad_sense_link(request) + response = client.get_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_ad_sense_link_rest_unset_required_fields(): +def test_get_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_ad_sense_link._get_unset_required_fields({}) + unset_fields = transport.get_event_create_rule._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_ad_sense_link_rest_interceptors(null_interceptor): +def test_get_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -69568,11 +71159,14 @@ def test_delete_ad_sense_link_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, 
"pre_delete_ad_sense_link" + transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_create_rule" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_create_rule" ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteAdSenseLinkRequest.pb( - analytics_admin.DeleteAdSenseLinkRequest() + post.assert_not_called() + pb_message = analytics_admin.GetEventCreateRuleRequest.pb( + analytics_admin.GetEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -69584,15 +71178,19 @@ def test_delete_ad_sense_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = event_create_and_edit.EventCreateRule.to_json( + event_create_and_edit.EventCreateRule() + ) - request = analytics_admin.DeleteAdSenseLinkRequest() + request = analytics_admin.GetEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = event_create_and_edit.EventCreateRule() - client.delete_ad_sense_link( + client.get_event_create_rule( request, metadata=[ ("key", "val"), @@ -69601,10 +71199,11 @@ def test_delete_ad_sense_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_ad_sense_link_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteAdSenseLinkRequest +def test_get_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.GetEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69612,7 +71211,9 @@ def test_delete_ad_sense_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/adSenseLinks/sample2"} + request_init = { + "name": 
"properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -69624,10 +71225,10 @@ def test_delete_ad_sense_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_ad_sense_link(request) + client.get_event_create_rule(request) -def test_delete_ad_sense_link_rest_flattened(): +def test_get_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69636,10 +71237,12 @@ def test_delete_ad_sense_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = event_create_and_edit.EventCreateRule() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/adSenseLinks/sample2"} + sample_request = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -69650,23 +71253,26 @@ def test_delete_ad_sense_link_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = event_create_and_edit.EventCreateRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_ad_sense_link(**mock_args) + client.get_event_create_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/adSenseLinks/*}" % client.transport._host, + "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" + % client.transport._host, args[1], ) -def test_delete_ad_sense_link_rest_flattened_error(transport: str = "rest"): +def test_get_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -69675,13 +71281,13 @@ def test_delete_ad_sense_link_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_ad_sense_link( - analytics_admin.DeleteAdSenseLinkRequest(), + client.get_event_create_rule( + analytics_admin.GetEventCreateRuleRequest(), name="name_value", ) -def test_delete_ad_sense_link_rest_error(): +def test_get_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -69690,24 +71296,24 @@ def test_delete_ad_sense_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListAdSenseLinksRequest, + analytics_admin.ListEventCreateRulesRequest, dict, ], ) -def test_list_ad_sense_links_rest(request_type): +def test_list_event_create_rules_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAdSenseLinksResponse( + return_value = analytics_admin.ListEventCreateRulesResponse( next_page_token="next_page_token_value", ) @@ -69715,20 +71321,20 @@ def test_list_ad_sense_links_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_sense_links(request) + response = client.list_event_create_rules(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdSenseLinksPager) + assert isinstance(response, pagers.ListEventCreateRulesPager) assert response.next_page_token == "next_page_token_value" -def test_list_ad_sense_links_rest_required_fields( - request_type=analytics_admin.ListAdSenseLinksRequest, +def test_list_event_create_rules_rest_required_fields( + request_type=analytics_admin.ListEventCreateRulesRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -69748,7 +71354,7 @@ def test_list_ad_sense_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_sense_links._get_unset_required_fields(jsonified_request) + ).list_event_create_rules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -69757,7 +71363,7 @@ def test_list_ad_sense_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_sense_links._get_unset_required_fields(jsonified_request) + ).list_event_create_rules._get_unset_required_fields(jsonified_request) # Check that 
path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -69778,7 +71384,7 @@ def test_list_ad_sense_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListAdSenseLinksResponse() + return_value = analytics_admin.ListEventCreateRulesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -69799,25 +71405,25 @@ def test_list_ad_sense_links_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_sense_links(request) + response = client.list_event_create_rules(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_ad_sense_links_rest_unset_required_fields(): +def test_list_event_create_rules_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_ad_sense_links._get_unset_required_fields({}) + unset_fields = transport.list_event_create_rules._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -69830,7 +71436,7 @@ def test_list_ad_sense_links_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_ad_sense_links_rest_interceptors(null_interceptor): +def test_list_event_create_rules_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -69843,14 +71449,14 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_ad_sense_links" + transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_create_rules" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_ad_sense_links" + transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_create_rules" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListAdSenseLinksRequest.pb( - analytics_admin.ListAdSenseLinksRequest() + pb_message = analytics_admin.ListEventCreateRulesRequest.pb( + analytics_admin.ListEventCreateRulesRequest() ) transcode.return_value = { "method": "post", @@ -69862,19 +71468,21 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.ListAdSenseLinksResponse.to_json( - analytics_admin.ListAdSenseLinksResponse() + req.return_value._content = ( + analytics_admin.ListEventCreateRulesResponse.to_json( + analytics_admin.ListEventCreateRulesResponse() + ) ) - request = analytics_admin.ListAdSenseLinksRequest() + request = analytics_admin.ListEventCreateRulesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListAdSenseLinksResponse() + post.return_value = analytics_admin.ListEventCreateRulesResponse() - client.list_ad_sense_links( + client.list_event_create_rules( request, metadata=[ ("key", "val"), @@ -69886,8 +71494,8 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_list_ad_sense_links_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListAdSenseLinksRequest +def test_list_event_create_rules_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.ListEventCreateRulesRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -69895,7 +71503,7 @@ def test_list_ad_sense_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -69907,10 +71515,10 @@ def test_list_ad_sense_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_ad_sense_links(request) + client.list_event_create_rules(request) -def test_list_ad_sense_links_rest_flattened(): +def test_list_event_create_rules_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -69919,10 +71527,10 @@ def test_list_ad_sense_links_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListAdSenseLinksResponse() + return_value = analytics_admin.ListEventCreateRulesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -69934,24 +71542,25 @@ def test_list_ad_sense_links_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListAdSenseLinksResponse.pb(return_value) + return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_ad_sense_links(**mock_args) + client.list_event_create_rules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/adSenseLinks" % client.transport._host, + "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + % client.transport._host, args[1], ) -def test_list_ad_sense_links_rest_flattened_error(transport: str = "rest"): +def test_list_event_create_rules_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -69960,13 +71569,13 @@ def test_list_ad_sense_links_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_ad_sense_links( - analytics_admin.ListAdSenseLinksRequest(), + client.list_event_create_rules( + analytics_admin.ListEventCreateRulesRequest(), parent="parent_value", ) -def test_list_ad_sense_links_rest_pager(transport: str = "rest"): +def test_list_event_create_rules_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -69978,28 +71587,28 @@ def test_list_ad_sense_links_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), - resources.AdSenseLink(), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), ], next_page_token="abc", ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[], + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[], next_page_token="def", ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), ], next_page_token="ghi", ), - analytics_admin.ListAdSenseLinksResponse( - adsense_links=[ - resources.AdSenseLink(), - resources.AdSenseLink(), + analytics_admin.ListEventCreateRulesResponse( + event_create_rules=[ + event_create_and_edit.EventCreateRule(), + event_create_and_edit.EventCreateRule(), ], ), ) @@ -70008,7 +71617,7 @@ def test_list_ad_sense_links_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - analytics_admin.ListAdSenseLinksResponse.to_json(x) for x in response + 
analytics_admin.ListEventCreateRulesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -70016,15 +71625,17 @@ def test_list_ad_sense_links_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "properties/sample1"} + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} - pager = client.list_ad_sense_links(request=sample_request) + pager = client.list_event_create_rules(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.AdSenseLink) for i in results) + assert all( + isinstance(i, event_create_and_edit.EventCreateRule) for i in results + ) - pages = list(client.list_ad_sense_links(request=sample_request).pages) + pages = list(client.list_event_create_rules(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -70032,20 +71643,103 @@ def test_list_ad_sense_links_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetEventCreateRuleRequest, + analytics_admin.CreateEventCreateRuleRequest, dict, ], ) -def test_get_event_create_rule_rest(request_type): +def test_create_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" - } + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init["event_create_rule"] = { + "name": "name_value", + "destination_event": "destination_event_value", + "event_conditions": [ + { + "field": "field_value", + "comparison_type": 1, + "value": "value_value", + 
"negated": True, + } + ], + "source_copy_parameters": True, + "parameter_mutations": [ + {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateEventCreateRuleRequest.meta.fields[ + "event_create_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, 
"keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["event_create_rule"][field])): + del request_init["event_create_rule"][field][i][subfield] + else: + del request_init["event_create_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -70066,7 +71760,7 @@ def test_get_event_create_rule_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_event_create_rule(request) + response = client.create_event_create_rule(request) # Establish that the response is the type that we expect. 
assert isinstance(response, event_create_and_edit.EventCreateRule) @@ -70075,13 +71769,13 @@ def test_get_event_create_rule_rest(request_type): assert response.source_copy_parameters is True -def test_get_event_create_rule_rest_required_fields( - request_type=analytics_admin.GetEventCreateRuleRequest, +def test_create_event_create_rule_rest_required_fields( + request_type=analytics_admin.CreateEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -70096,21 +71790,21 @@ def test_get_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_event_create_rule._get_unset_required_fields(jsonified_request) + ).create_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_event_create_rule._get_unset_required_fields(jsonified_request) + ).create_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70131,9 +71825,10 @@ def test_get_event_create_rule_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": 
pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -70146,24 +71841,32 @@ def test_get_event_create_rule_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_event_create_rule(request) + response = client.create_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_event_create_rule_rest_unset_required_fields(): +def test_create_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_event_create_rule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_event_create_rule._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "eventCreateRule", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_event_create_rule_rest_interceptors(null_interceptor): +def test_create_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -70176,14 +71879,14 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_create_rule" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_create_rule" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetEventCreateRuleRequest.pb( - analytics_admin.GetEventCreateRuleRequest() + pb_message = analytics_admin.CreateEventCreateRuleRequest.pb( + analytics_admin.CreateEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -70199,7 +71902,7 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): event_create_and_edit.EventCreateRule() ) - request = analytics_admin.GetEventCreateRuleRequest() + request = analytics_admin.CreateEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -70207,7 +71910,7 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = event_create_and_edit.EventCreateRule() - client.get_event_create_rule( + client.create_event_create_rule( request, metadata=[ ("key", "val"), @@ -70219,8 +71922,8 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.GetEventCreateRuleRequest +def test_create_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70228,9 +71931,7 @@ def test_get_event_create_rule_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" - } + request_init = {"parent": "properties/sample1/dataStreams/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -70242,10 +71943,10 @@ def test_get_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_event_create_rule(request) + client.create_event_create_rule(request) -def test_get_event_create_rule_rest_flattened(): +def test_create_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70257,13 +71958,12 @@ def test_get_event_create_rule_rest_flattened(): return_value = event_create_and_edit.EventCreateRule() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" - } + sample_request = {"parent": "properties/sample1/dataStreams/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) mock_args.update(sample_request) @@ -70276,20 +71976,20 @@ def test_get_event_create_rule_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_event_create_rule(**mock_args) + client.create_event_create_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" + "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" % client.transport._host, args[1], ) -def test_get_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_create_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -70298,13 +71998,14 @@ def test_get_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_event_create_rule( - analytics_admin.GetEventCreateRuleRequest(), - name="name_value", + client.create_event_create_rule( + analytics_admin.CreateEventCreateRuleRequest(), + parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), ) -def test_get_event_create_rule_rest_error(): +def test_create_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -70313,50 +72014,142 @@ def test_get_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListEventCreateRulesRequest, + analytics_admin.UpdateEventCreateRuleRequest, dict, ], ) -def test_list_event_create_rules_rest(request_type): +def test_update_event_create_rule_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = { + "event_create_rule": { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } + } + request_init["event_create_rule"] = { + "name": 
"properties/sample1/dataStreams/sample2/eventCreateRules/sample3", + "destination_event": "destination_event_value", + "event_conditions": [ + { + "field": "field_value", + "comparison_type": 1, + "value": "value_value", + "negated": True, + } + ], + "source_copy_parameters": True, + "parameter_mutations": [ + {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.UpdateEventCreateRuleRequest.meta.fields[ + "event_create_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["event_create_rule"][field])): + del request_init["event_create_rule"][field][i][subfield] + 
else: + del request_init["event_create_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListEventCreateRulesResponse( - next_page_token="next_page_token_value", + return_value = event_create_and_edit.EventCreateRule( + name="name_value", + destination_event="destination_event_value", + source_copy_parameters=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_event_create_rules(request) + response = client.update_event_create_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEventCreateRulesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, event_create_and_edit.EventCreateRule) + assert response.name == "name_value" + assert response.destination_event == "destination_event_value" + assert response.source_copy_parameters is True -def test_list_event_create_rules_rest_required_fields( - request_type=analytics_admin.ListEventCreateRulesRequest, +def test_update_event_create_rule_rest_required_fields( + request_type=analytics_admin.UpdateEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -70371,28 +72164,19 @@ def test_list_event_create_rules_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_event_create_rules._get_unset_required_fields(jsonified_request) + ).update_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_event_create_rules._get_unset_required_fields(jsonified_request) + ).update_event_create_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70401,7 +72185,7 @@ def test_list_event_create_rules_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListEventCreateRulesResponse() + return_value = event_create_and_edit.EventCreateRule() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -70413,47 +72197,48 @@ def test_list_event_create_rules_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_event_create_rules(request) + response = client.update_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_event_create_rules_rest_unset_required_fields(): +def 
test_update_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_event_create_rules._get_unset_required_fields({}) + unset_fields = transport.update_event_create_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "pageSize", - "pageToken", + "eventCreateRule", + "updateMask", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_event_create_rules_rest_interceptors(null_interceptor): +def test_update_event_create_rule_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -70466,14 +72251,14 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_create_rules" + transports.AnalyticsAdminServiceRestInterceptor, "post_update_event_create_rule" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_create_rules" + transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListEventCreateRulesRequest.pb( - analytics_admin.ListEventCreateRulesRequest() + pb_message = analytics_admin.UpdateEventCreateRuleRequest.pb( + analytics_admin.UpdateEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -70485,21 +72270,19 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.ListEventCreateRulesResponse.to_json( - 
analytics_admin.ListEventCreateRulesResponse() - ) + req.return_value._content = event_create_and_edit.EventCreateRule.to_json( + event_create_and_edit.EventCreateRule() ) - request = analytics_admin.ListEventCreateRulesRequest() + request = analytics_admin.UpdateEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListEventCreateRulesResponse() + post.return_value = event_create_and_edit.EventCreateRule() - client.list_event_create_rules( + client.update_event_create_rule( request, metadata=[ ("key", "val"), @@ -70511,8 +72294,8 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_event_create_rules_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.ListEventCreateRulesRequest +def test_update_event_create_rule_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.UpdateEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70520,7 +72303,11 @@ def test_list_event_create_rules_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = { + "event_create_rule": { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -70532,10 +72319,10 @@ def test_list_event_create_rules_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_event_create_rules(request) + client.update_event_create_rule(request) -def test_list_event_create_rules_rest_flattened(): +def test_update_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70544,14 +72331,19 @@ def test_list_event_create_rules_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListEventCreateRulesResponse() + return_value = event_create_and_edit.EventCreateRule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + sample_request = { + "event_create_rule": { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -70559,25 +72351,25 @@ def test_list_event_create_rules_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListEventCreateRulesResponse.pb(return_value) + return_value = event_create_and_edit.EventCreateRule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_event_create_rules(**mock_args) + client.update_event_create_rule(**mock_args) # Establish that the 
underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + "%s/v1alpha/{event_create_rule.name=properties/*/dataStreams/*/eventCreateRules/*}" % client.transport._host, args[1], ) -def test_list_event_create_rules_rest_flattened_error(transport: str = "rest"): +def test_update_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -70586,213 +72378,63 @@ def test_list_event_create_rules_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_event_create_rules( - analytics_admin.ListEventCreateRulesRequest(), - parent="parent_value", + client.update_event_create_rule( + analytics_admin.UpdateEventCreateRuleRequest(), + event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_event_create_rules_rest_pager(transport: str = "rest"): +def test_update_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - next_page_token="abc", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[], - next_page_token="def", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - ], - next_page_token="ghi", - ), - analytics_admin.ListEventCreateRulesResponse( - event_create_rules=[ - event_create_and_edit.EventCreateRule(), - event_create_and_edit.EventCreateRule(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListEventCreateRulesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} - - pager = client.list_event_create_rules(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, event_create_and_edit.EventCreateRule) for i in results - ) - - pages = list(client.list_event_create_rules(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateEventCreateRuleRequest, + analytics_admin.DeleteEventCreateRuleRequest, dict, ], ) -def test_create_event_create_rule_rest(request_type): +def test_delete_event_create_rule_rest(request_type): client = 
AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} - request_init["event_create_rule"] = { - "name": "name_value", - "destination_event": "destination_event_value", - "event_conditions": [ - { - "field": "field_value", - "comparison_type": 1, - "value": "value_value", - "negated": True, - } - ], - "source_copy_parameters": True, - "parameter_mutations": [ - {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} - ], + request_init = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateEventCreateRuleRequest.meta.fields[ - "event_create_rule" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["event_create_rule"][field])): - del request_init["event_create_rule"][field][i][subfield] - 
else: - del request_init["event_create_rule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule( - name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_event_create_rule(request) + response = client.delete_event_create_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, event_create_and_edit.EventCreateRule) - assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert response is None -def test_create_event_create_rule_rest_required_fields( - request_type=analytics_admin.CreateEventCreateRuleRequest, +def test_delete_event_create_rule_rest_required_fields( + request_type=analytics_admin.DeleteEventCreateRuleRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -70807,21 +72449,21 @@ def test_create_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_event_create_rule._get_unset_required_fields(jsonified_request) + ).delete_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_event_create_rule._get_unset_required_fields(jsonified_request) + ).delete_event_create_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70830,7 +72472,7 @@ def test_create_event_create_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value 
for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -70842,48 +72484,36 @@ def test_create_event_create_rule_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_event_create_rule(request) + response = client.delete_event_create_rule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_event_create_rule_rest_unset_required_fields(): +def test_delete_event_create_rule_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_event_create_rule._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "eventCreateRule", - ) - ) - ) + unset_fields = transport.delete_event_create_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_event_create_rule_rest_interceptors(null_interceptor): +def test_delete_event_create_rule_rest_interceptors(null_interceptor): transport = 
transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -70896,14 +72526,11 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_create_rule" - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_event_create_rule" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.CreateEventCreateRuleRequest.pb( - analytics_admin.CreateEventCreateRuleRequest() + pb_message = analytics_admin.DeleteEventCreateRuleRequest.pb( + analytics_admin.DeleteEventCreateRuleRequest() ) transcode.return_value = { "method": "post", @@ -70915,19 +72542,15 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = event_create_and_edit.EventCreateRule.to_json( - event_create_and_edit.EventCreateRule() - ) - request = analytics_admin.CreateEventCreateRuleRequest() + request = analytics_admin.DeleteEventCreateRuleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = event_create_and_edit.EventCreateRule() - client.create_event_create_rule( + client.delete_event_create_rule( request, metadata=[ ("key", "val"), @@ -70936,11 +72559,10 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_create_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateEventCreateRuleRequest +def test_delete_event_create_rule_rest_bad_request( + transport: str = "rest", 
request_type=analytics_admin.DeleteEventCreateRuleRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -70948,7 +72570,9 @@ def test_create_event_create_rule_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1/dataStreams/sample2"} + request_init = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -70960,10 +72584,10 @@ def test_create_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_event_create_rule(request) + client.delete_event_create_rule(request) -def test_create_event_create_rule_rest_flattened(): +def test_delete_event_create_rule_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -70972,41 +72596,40 @@ def test_create_event_create_rule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = event_create_and_edit.EventCreateRule() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1/dataStreams/sample2"} + sample_request = { + "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_event_create_rule(**mock_args) + client.delete_event_create_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*/dataStreams/*}/eventCreateRules" + "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" % client.transport._host, args[1], ) -def test_create_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_delete_event_create_rule_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -71015,14 +72638,13 @@ def test_create_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_event_create_rule( - analytics_admin.CreateEventCreateRuleRequest(), - parent="parent_value", - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + client.delete_event_create_rule( + analytics_admin.DeleteEventCreateRuleRequest(), + name="name_value", ) -def test_create_event_create_rule_rest_error(): +def test_delete_event_create_rule_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -71031,11 +72653,11 @@ def test_create_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateEventCreateRuleRequest, + analytics_admin.UpdateDataRedactionSettingsRequest, dict, ], ) -def test_update_event_create_rule_rest(request_type): +def test_update_data_redaction_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71043,24 +72665,17 @@ def test_update_event_create_rule_rest(request_type): # send a request that will satisfy transcoding request_init = { - "event_create_rule": { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "data_redaction_settings": { + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" } } - request_init["event_create_rule"] = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3", - "destination_event": "destination_event_value", - "event_conditions": [ - { - "field": "field_value", - "comparison_type": 1, - "value": "value_value", - "negated": True, - } - ], - "source_copy_parameters": True, - "parameter_mutations": [ - {"parameter": "parameter_value", "parameter_value": "parameter_value_value"} + request_init["data_redaction_settings"] = { + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings", + "email_redaction_enabled": True, + "query_parameter_redaction_enabled": True, + 
"query_parameter_keys": [ + "query_parameter_keys_value1", + "query_parameter_keys_value2", ], } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -71068,8 +72683,8 @@ def test_update_event_create_rule_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateEventCreateRuleRequest.meta.fields[ - "event_create_rule" + test_field = analytics_admin.UpdateDataRedactionSettingsRequest.meta.fields[ + "data_redaction_settings" ] def get_message_fields(field): @@ -71098,7 +72713,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["event_create_rule"].items(): # pragma: NO COVER + for field, value in request_init[ + "data_redaction_settings" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -71128,41 +72745,43 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["event_create_rule"][field])): - del request_init["event_create_rule"][field][i][subfield] + for i in range(0, len(request_init["data_redaction_settings"][field])): + del request_init["data_redaction_settings"][field][i][subfield] else: - del request_init["event_create_rule"][field][subfield] + del request_init["data_redaction_settings"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = event_create_and_edit.EventCreateRule( + return_value = resources.DataRedactionSettings( name="name_value", - destination_event="destination_event_value", - source_copy_parameters=True, + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = resources.DataRedactionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_event_create_rule(request) + response = client.update_data_redaction_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, event_create_and_edit.EventCreateRule) + assert isinstance(response, resources.DataRedactionSettings) assert response.name == "name_value" - assert response.destination_event == "destination_event_value" - assert response.source_copy_parameters is True + assert response.email_redaction_enabled is True + assert response.query_parameter_redaction_enabled is True + assert response.query_parameter_keys == ["query_parameter_keys_value"] -def test_update_event_create_rule_rest_required_fields( - request_type=analytics_admin.UpdateEventCreateRuleRequest, +def test_update_data_redaction_settings_rest_required_fields( + request_type=analytics_admin.UpdateDataRedactionSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -71181,14 +72800,14 @@ def test_update_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_event_create_rule._get_unset_required_fields(jsonified_request) + 
).update_data_redaction_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_event_create_rule._get_unset_required_fields(jsonified_request) + ).update_data_redaction_settings._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -71202,7 +72821,7 @@ def test_update_event_create_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = resources.DataRedactionSettings() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -71224,30 +72843,32 @@ def test_update_event_create_rule_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = resources.DataRedactionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_event_create_rule(request) + response = client.update_data_redaction_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_event_create_rule_rest_unset_required_fields(): +def test_update_data_redaction_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.update_event_create_rule._get_unset_required_fields({}) + unset_fields = transport.update_data_redaction_settings._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( set(("updateMask",)) & set( ( - "eventCreateRule", + "dataRedactionSettings", "updateMask", ) ) @@ -71255,7 +72876,7 @@ def test_update_event_create_rule_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_event_create_rule_rest_interceptors(null_interceptor): +def test_update_data_redaction_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -71268,14 +72889,16 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_update_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_redaction_settings", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_update_data_redaction_settings", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateEventCreateRuleRequest.pb( - analytics_admin.UpdateEventCreateRuleRequest() + pb_message = analytics_admin.UpdateDataRedactionSettingsRequest.pb( + analytics_admin.UpdateDataRedactionSettingsRequest() ) transcode.return_value = { "method": "post", @@ -71287,19 +72910,19 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = event_create_and_edit.EventCreateRule.to_json( - event_create_and_edit.EventCreateRule() + req.return_value._content = 
resources.DataRedactionSettings.to_json( + resources.DataRedactionSettings() ) - request = analytics_admin.UpdateEventCreateRuleRequest() + request = analytics_admin.UpdateDataRedactionSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = event_create_and_edit.EventCreateRule() + post.return_value = resources.DataRedactionSettings() - client.update_event_create_rule( + client.update_data_redaction_settings( request, metadata=[ ("key", "val"), @@ -71311,8 +72934,9 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.UpdateEventCreateRuleRequest +def test_update_data_redaction_settings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.UpdateDataRedactionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71321,8 +72945,8 @@ def test_update_event_create_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "event_create_rule": { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "data_redaction_settings": { + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" } } request = request_type(**request_init) @@ -71336,10 +72960,10 @@ def test_update_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_event_create_rule(request) + client.update_data_redaction_settings(request) -def test_update_event_create_rule_rest_flattened(): +def test_update_data_redaction_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71348,18 +72972,18 @@ def test_update_event_create_rule_rest_flattened(): # Mock the 
http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = event_create_and_edit.EventCreateRule() + return_value = resources.DataRedactionSettings() # get arguments that satisfy an http rule for this method sample_request = { - "event_create_rule": { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "data_redaction_settings": { + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" } } # get truthy value for each flattened field mock_args = dict( - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + data_redaction_settings=resources.DataRedactionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -71368,25 +72992,25 @@ def test_update_event_create_rule_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = event_create_and_edit.EventCreateRule.pb(return_value) + return_value = resources.DataRedactionSettings.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_event_create_rule(**mock_args) + client.update_data_redaction_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{event_create_rule.name=properties/*/dataStreams/*/eventCreateRules/*}" + "%s/v1alpha/{data_redaction_settings.name=properties/*/dataStreams/*/dataRedactionSettings}" % client.transport._host, args[1], ) -def test_update_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_update_data_redaction_settings_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -71395,14 +73019,14 @@ def test_update_event_create_rule_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_event_create_rule( - analytics_admin.UpdateEventCreateRuleRequest(), - event_create_rule=event_create_and_edit.EventCreateRule(name="name_value"), + client.update_data_redaction_settings( + analytics_admin.UpdateDataRedactionSettingsRequest(), + data_redaction_settings=resources.DataRedactionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_event_create_rule_rest_error(): +def test_update_data_redaction_settings_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -71411,11 +73035,11 @@ def test_update_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteEventCreateRuleRequest, + analytics_admin.GetDataRedactionSettingsRequest, dict, ], ) -def test_delete_event_create_rule_rest(request_type): +def test_get_data_redaction_settings_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71423,30 +73047,41 @@ def test_delete_event_create_rule_rest(request_type): # send a request that will satisfy 
transcoding request_init = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DataRedactionSettings( + name="name_value", + email_redaction_enabled=True, + query_parameter_redaction_enabled=True, + query_parameter_keys=["query_parameter_keys_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DataRedactionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_event_create_rule(request) + response = client.get_data_redaction_settings(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, resources.DataRedactionSettings) + assert response.name == "name_value" + assert response.email_redaction_enabled is True + assert response.query_parameter_redaction_enabled is True + assert response.query_parameter_keys == ["query_parameter_keys_value"] -def test_delete_event_create_rule_rest_required_fields( - request_type=analytics_admin.DeleteEventCreateRuleRequest, +def test_get_data_redaction_settings_rest_required_fields( + request_type=analytics_admin.GetDataRedactionSettingsRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -71466,7 +73101,7 @@ def test_delete_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_event_create_rule._get_unset_required_fields(jsonified_request) + ).get_data_redaction_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -71475,7 +73110,7 @@ def test_delete_event_create_rule_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_event_create_rule._get_unset_required_fields(jsonified_request) + ).get_data_redaction_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -71489,7 +73124,7 @@ def test_delete_event_create_rule_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DataRedactionSettings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -71501,36 +73136,39 @@ def test_delete_event_create_rule_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = resources.DataRedactionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_event_create_rule(request) + response = client.get_data_redaction_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_event_create_rule_rest_unset_required_fields(): +def test_get_data_redaction_settings_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_event_create_rule._get_unset_required_fields({}) + unset_fields = transport.get_data_redaction_settings._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_event_create_rule_rest_interceptors(null_interceptor): +def test_get_data_redaction_settings_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -71543,11 +73181,16 @@ def test_delete_event_create_rule_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_event_create_rule" + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_redaction_settings", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_data_redaction_settings", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteEventCreateRuleRequest.pb( - analytics_admin.DeleteEventCreateRuleRequest() + post.assert_not_called() + pb_message = analytics_admin.GetDataRedactionSettingsRequest.pb( + analytics_admin.GetDataRedactionSettingsRequest() ) transcode.return_value = { "method": "post", @@ -71559,15 +73202,19 @@ def test_delete_event_create_rule_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = resources.DataRedactionSettings.to_json( + resources.DataRedactionSettings() + ) - request = analytics_admin.DeleteEventCreateRuleRequest() + request = analytics_admin.GetDataRedactionSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = resources.DataRedactionSettings() - client.delete_event_create_rule( + client.get_data_redaction_settings( request, metadata=[ ("key", "val"), @@ -71576,10 +73223,12 @@ def test_delete_event_create_rule_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_event_create_rule_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.DeleteEventCreateRuleRequest +def test_get_data_redaction_settings_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.GetDataRedactionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71588,7 +73237,7 @@ def test_delete_event_create_rule_rest_bad_request( # send a request that will satisfy transcoding request_init = { - 
"name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" } request = request_type(**request_init) @@ -71601,10 +73250,10 @@ def test_delete_event_create_rule_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_event_create_rule(request) + client.get_data_redaction_settings(request) -def test_delete_event_create_rule_rest_flattened(): +def test_get_data_redaction_settings_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71613,11 +73262,11 @@ def test_delete_event_create_rule_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = resources.DataRedactionSettings() # get arguments that satisfy an http rule for this method sample_request = { - "name": "properties/sample1/dataStreams/sample2/eventCreateRules/sample3" + "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" } # get truthy value for each flattened field @@ -71629,39 +73278,250 @@ def test_delete_event_create_rule_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = resources.DataRedactionSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_redaction_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/dataStreams/*/dataRedactionSettings}" + % client.transport._host, + args[1], + ) + + +def test_get_data_redaction_settings_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_redaction_settings( + analytics_admin.GetDataRedactionSettingsRequest(), + name="name_value", + ) + + +def test_get_data_redaction_settings_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.CreateRollupPropertyRequest, + dict, + ], +) +def test_create_rollup_property_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.CreateRollupPropertyResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_admin.CreateRollupPropertyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + response = client.create_rollup_property(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_admin.CreateRollupPropertyResponse) + + +def test_create_rollup_property_rest_required_fields( + request_type=analytics_admin.CreateRollupPropertyRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rollup_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rollup_property._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = analytics_admin.CreateRollupPropertyResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_admin.CreateRollupPropertyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_rollup_property(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_rollup_property_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_rollup_property._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("rollupProperty",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_rollup_property_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "post_create_rollup_property" + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_rollup_property" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_admin.CreateRollupPropertyRequest.pb( + analytics_admin.CreateRollupPropertyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_admin.CreateRollupPropertyResponse.to_json( + analytics_admin.CreateRollupPropertyResponse() + ) + ) - client.delete_event_create_rule(**mock_args) + request = analytics_admin.CreateRollupPropertyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_admin.CreateRollupPropertyResponse() - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/eventCreateRules/*}" - % client.transport._host, - args[1], + client.create_rollup_property( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() + -def test_delete_event_create_rule_rest_flattened_error(transport: str = "rest"): +def test_create_rollup_property_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateRollupPropertyRequest +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_event_create_rule( - analytics_admin.DeleteEventCreateRuleRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_rollup_property(request) -def test_delete_event_create_rule_rest_error(): +def test_create_rollup_property_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -71670,139 +73530,52 @@ def test_delete_event_create_rule_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.UpdateDataRedactionSettingsRequest, + analytics_admin.GetRollupPropertySourceLinkRequest, dict, ], ) -def test_update_data_redaction_settings_rest(request_type): +def test_get_rollup_property_source_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "data_redaction_settings": { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" - } - } - request_init["data_redaction_settings"] = { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings", - "email_redaction_enabled": True, - "query_parameter_redaction_enabled": True, - "query_parameter_keys": [ - "query_parameter_keys_value1", - "query_parameter_keys_value2", - ], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.UpdateDataRedactionSettingsRequest.meta.fields[ - "data_redaction_settings" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "data_redaction_settings" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are 
not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_redaction_settings"][field])): - del request_init["data_redaction_settings"][field][i][subfield] - else: - del request_init["data_redaction_settings"][field][subfield] + request_init = {"name": "properties/sample1/rollupPropertySourceLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRedactionSettings( + return_value = resources.RollupPropertySourceLink( name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + source_property="source_property_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRedactionSettings.pb(return_value) + return_value = resources.RollupPropertySourceLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_data_redaction_settings(request) + response = client.get_rollup_property_source_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRedactionSettings) + assert isinstance(response, resources.RollupPropertySourceLink) assert response.name == "name_value" - assert response.email_redaction_enabled is True - assert response.query_parameter_redaction_enabled is True - assert response.query_parameter_keys == ["query_parameter_keys_value"] + assert response.source_property == "source_property_value" -def test_update_data_redaction_settings_rest_required_fields( - request_type=analytics_admin.UpdateDataRedactionSettingsRequest, +def test_get_rollup_property_source_link_rest_required_fields( + request_type=analytics_admin.GetRollupPropertySourceLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -71817,19 +73590,21 @@ def test_update_data_redaction_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_redaction_settings._get_unset_required_fields(jsonified_request) + ).get_rollup_property_source_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_redaction_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).get_rollup_property_source_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71838,7 +73613,7 @@ def test_update_data_redaction_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DataRedactionSettings() + return_value = resources.RollupPropertySourceLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -71850,50 +73625,41 @@ def test_update_data_redaction_settings_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRedactionSettings.pb(return_value) + return_value = resources.RollupPropertySourceLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_data_redaction_settings(request) + response = client.get_rollup_property_source_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_redaction_settings_rest_unset_required_fields(): +def 
test_get_rollup_property_source_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_redaction_settings._get_unset_required_fields( + unset_fields = transport.get_rollup_property_source_link._get_unset_required_fields( {} ) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "dataRedactionSettings", - "updateMask", - ) - ) - ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_redaction_settings_rest_interceptors(null_interceptor): +def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -71907,15 +73673,15 @@ def test_update_data_redaction_settings_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_update_data_redaction_settings", + "post_get_rollup_property_source_link", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_update_data_redaction_settings", + "pre_get_rollup_property_source_link", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.UpdateDataRedactionSettingsRequest.pb( - analytics_admin.UpdateDataRedactionSettingsRequest() + pb_message = analytics_admin.GetRollupPropertySourceLinkRequest.pb( + analytics_admin.GetRollupPropertySourceLinkRequest() ) transcode.return_value = { "method": "post", @@ -71927,19 +73693,19 @@ def test_update_data_redaction_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataRedactionSettings.to_json( - 
resources.DataRedactionSettings() + req.return_value._content = resources.RollupPropertySourceLink.to_json( + resources.RollupPropertySourceLink() ) - request = analytics_admin.UpdateDataRedactionSettingsRequest() + request = analytics_admin.GetRollupPropertySourceLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataRedactionSettings() + post.return_value = resources.RollupPropertySourceLink() - client.update_data_redaction_settings( + client.get_rollup_property_source_link( request, metadata=[ ("key", "val"), @@ -71951,9 +73717,9 @@ def test_update_data_redaction_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_data_redaction_settings_rest_bad_request( +def test_get_rollup_property_source_link_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.UpdateDataRedactionSettingsRequest, + request_type=analytics_admin.GetRollupPropertySourceLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -71961,11 +73727,7 @@ def test_update_data_redaction_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "data_redaction_settings": { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" - } - } + request_init = {"name": "properties/sample1/rollupPropertySourceLinks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -71977,10 +73739,10 @@ def test_update_data_redaction_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_data_redaction_settings(request) + client.get_rollup_property_source_link(request) -def test_update_data_redaction_settings_rest_flattened(): +def test_get_rollup_property_source_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -71989,19 +73751,16 @@ def test_update_data_redaction_settings_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRedactionSettings() + return_value = resources.RollupPropertySourceLink() # get arguments that satisfy an http rule for this method sample_request = { - "data_redaction_settings": { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" - } + "name": "properties/sample1/rollupPropertySourceLinks/sample2" } # get truthy value for each flattened field mock_args = dict( - data_redaction_settings=resources.DataRedactionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -72009,25 +73768,25 @@ def test_update_data_redaction_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRedactionSettings.pb(return_value) + return_value = resources.RollupPropertySourceLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_data_redaction_settings(**mock_args) + client.get_rollup_property_source_link(**mock_args) # 
Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{data_redaction_settings.name=properties/*/dataStreams/*/dataRedactionSettings}" + "%s/v1alpha/{name=properties/*/rollupPropertySourceLinks/*}" % client.transport._host, args[1], ) -def test_update_data_redaction_settings_rest_flattened_error(transport: str = "rest"): +def test_get_rollup_property_source_link_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -72036,14 +73795,13 @@ def test_update_data_redaction_settings_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_redaction_settings( - analytics_admin.UpdateDataRedactionSettingsRequest(), - data_redaction_settings=resources.DataRedactionSettings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_rollup_property_source_link( + analytics_admin.GetRollupPropertySourceLinkRequest(), + name="name_value", ) -def test_update_data_redaction_settings_rest_error(): +def test_get_rollup_property_source_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -72052,58 +73810,52 @@ def test_update_data_redaction_settings_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetDataRedactionSettingsRequest, + analytics_admin.ListRollupPropertySourceLinksRequest, dict, ], ) -def test_get_data_redaction_settings_rest(request_type): +def test_list_rollup_property_source_links_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - 
request_init = { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRedactionSettings( - name="name_value", - email_redaction_enabled=True, - query_parameter_redaction_enabled=True, - query_parameter_keys=["query_parameter_keys_value"], + return_value = analytics_admin.ListRollupPropertySourceLinksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRedactionSettings.pb(return_value) + return_value = analytics_admin.ListRollupPropertySourceLinksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_redaction_settings(request) + response = client.list_rollup_property_source_links(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.DataRedactionSettings) - assert response.name == "name_value" - assert response.email_redaction_enabled is True - assert response.query_parameter_redaction_enabled is True - assert response.query_parameter_keys == ["query_parameter_keys_value"] + assert isinstance(response, pagers.ListRollupPropertySourceLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_get_data_redaction_settings_rest_required_fields( - request_type=analytics_admin.GetDataRedactionSettingsRequest, +def test_list_rollup_property_source_links_rest_required_fields( + request_type=analytics_admin.ListRollupPropertySourceLinksRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -72118,21 +73870,28 @@ def test_get_data_redaction_settings_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_redaction_settings._get_unset_required_fields(jsonified_request) + ).list_rollup_property_source_links._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_redaction_settings._get_unset_required_fields(jsonified_request) + ).list_rollup_property_source_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72141,7 +73900,7 @@ def test_get_data_redaction_settings_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.DataRedactionSettings() + return_value = analytics_admin.ListRollupPropertySourceLinksResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72162,30 +73921,42 @@ def test_get_data_redaction_settings_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRedactionSettings.pb(return_value) + return_value = analytics_admin.ListRollupPropertySourceLinksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_data_redaction_settings(request) + response = client.list_rollup_property_source_links(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_data_redaction_settings_rest_unset_required_fields(): +def test_list_rollup_property_source_links_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_data_redaction_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.list_rollup_property_source_links._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_redaction_settings_rest_interceptors(null_interceptor): +def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72199,15 +73970,15 @@ def test_get_data_redaction_settings_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_get_data_redaction_settings", + "post_list_rollup_property_source_links", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_data_redaction_settings", + "pre_list_rollup_property_source_links", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.GetDataRedactionSettingsRequest.pb( - analytics_admin.GetDataRedactionSettingsRequest() + pb_message = analytics_admin.ListRollupPropertySourceLinksRequest.pb( + analytics_admin.ListRollupPropertySourceLinksRequest() ) transcode.return_value = { "method": "post", @@ -72219,19 +73990,21 @@ def test_get_data_redaction_settings_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.DataRedactionSettings.to_json( - resources.DataRedactionSettings() + req.return_value._content = ( + analytics_admin.ListRollupPropertySourceLinksResponse.to_json( + analytics_admin.ListRollupPropertySourceLinksResponse() + ) ) - request = 
analytics_admin.GetDataRedactionSettingsRequest() + request = analytics_admin.ListRollupPropertySourceLinksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.DataRedactionSettings() + post.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() - client.get_data_redaction_settings( + client.list_rollup_property_source_links( request, metadata=[ ("key", "val"), @@ -72243,9 +74016,9 @@ def test_get_data_redaction_settings_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_data_redaction_settings_rest_bad_request( +def test_list_rollup_property_source_links_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.GetDataRedactionSettingsRequest, + request_type=analytics_admin.ListRollupPropertySourceLinksRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72253,9 +74026,7 @@ def test_get_data_redaction_settings_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" - } + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -72267,10 +74038,10 @@ def test_get_data_redaction_settings_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_data_redaction_settings(request) + client.list_rollup_property_source_links(request) -def test_get_data_redaction_settings_rest_flattened(): +def test_list_rollup_property_source_links_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -72279,16 +74050,14 @@ def test_get_data_redaction_settings_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.DataRedactionSettings() + return_value = analytics_admin.ListRollupPropertySourceLinksResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/dataStreams/sample2/dataRedactionSettings" - } + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -72296,25 +74065,29 @@ def test_get_data_redaction_settings_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.DataRedactionSettings.pb(return_value) + return_value = analytics_admin.ListRollupPropertySourceLinksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_data_redaction_settings(**mock_args) + client.list_rollup_property_source_links(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/dataStreams/*/dataRedactionSettings}" + "%s/v1alpha/{parent=properties/*}/rollupPropertySourceLinks" % client.transport._host, args[1], ) -def test_get_data_redaction_settings_rest_flattened_error(transport: str = "rest"): +def test_list_rollup_property_source_links_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -72323,61 +74096,204 @@ def test_get_data_redaction_settings_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_data_redaction_settings( - analytics_admin.GetDataRedactionSettingsRequest(), - name="name_value", + client.list_rollup_property_source_links( + analytics_admin.ListRollupPropertySourceLinksRequest(), + parent="parent_value", ) -def test_get_data_redaction_settings_rest_error(): +def test_list_rollup_property_source_links_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + next_page_token="abc", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[], + next_page_token="def", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + ], + next_page_token="ghi", + ), + analytics_admin.ListRollupPropertySourceLinksResponse( + rollup_property_source_links=[ + resources.RollupPropertySourceLink(), + resources.RollupPropertySourceLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListRollupPropertySourceLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_rollup_property_source_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.RollupPropertySourceLink) for i in results) + + pages = list( + client.list_rollup_property_source_links(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateRollupPropertyRequest, + analytics_admin.CreateRollupPropertySourceLinkRequest, dict, ], ) -def test_create_rollup_property_rest(request_type): +def 
test_create_rollup_property_source_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {} + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["rollup_property_source_link"] = { + "name": "name_value", + "source_property": "source_property_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_admin.CreateRollupPropertySourceLinkRequest.meta.fields[ + "rollup_property_source_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "rollup_property_source_link" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["rollup_property_source_link"][field]) + ): + del 
request_init["rollup_property_source_link"][field][i][subfield] + else: + del request_init["rollup_property_source_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_admin.CreateRollupPropertyResponse() + return_value = resources.RollupPropertySourceLink( + name="name_value", + source_property="source_property_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateRollupPropertyResponse.pb(return_value) + return_value = resources.RollupPropertySourceLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_rollup_property(request) + response = client.create_rollup_property_source_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_admin.CreateRollupPropertyResponse) + assert isinstance(response, resources.RollupPropertySourceLink) + assert response.name == "name_value" + assert response.source_property == "source_property_value" -def test_create_rollup_property_rest_required_fields( - request_type=analytics_admin.CreateRollupPropertyRequest, +def test_create_rollup_property_source_link_rest_required_fields( + request_type=analytics_admin.CreateRollupPropertySourceLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -72392,17 +74308,21 @@ def test_create_rollup_property_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_rollup_property._get_unset_required_fields(jsonified_request) + ).create_rollup_property_source_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_rollup_property._get_unset_required_fields(jsonified_request) + ).create_rollup_property_source_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72411,7 +74331,7 @@ def test_create_rollup_property_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.CreateRollupPropertyResponse() + return_value = resources.RollupPropertySourceLink() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72433,30 +74353,40 @@ def test_create_rollup_property_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateRollupPropertyResponse.pb(return_value) + return_value = resources.RollupPropertySourceLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_rollup_property(request) + response = client.create_rollup_property_source_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_rollup_property_rest_unset_required_fields(): +def test_create_rollup_property_source_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_rollup_property._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("rollupProperty",))) + unset_fields = ( + transport.create_rollup_property_source_link._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "rollupPropertySourceLink", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_rollup_property_rest_interceptors(null_interceptor): +def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72469,14 +74399,16 @@ def 
test_create_rollup_property_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_rollup_property" + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_rollup_property_source_link", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_rollup_property" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_create_rollup_property_source_link", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateRollupPropertyRequest.pb( - analytics_admin.CreateRollupPropertyRequest() + pb_message = analytics_admin.CreateRollupPropertySourceLinkRequest.pb( + analytics_admin.CreateRollupPropertySourceLinkRequest() ) transcode.return_value = { "method": "post", @@ -72488,21 +74420,19 @@ def test_create_rollup_property_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_admin.CreateRollupPropertyResponse.to_json( - analytics_admin.CreateRollupPropertyResponse() - ) + req.return_value._content = resources.RollupPropertySourceLink.to_json( + resources.RollupPropertySourceLink() ) - request = analytics_admin.CreateRollupPropertyRequest() + request = analytics_admin.CreateRollupPropertySourceLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.CreateRollupPropertyResponse() + post.return_value = resources.RollupPropertySourceLink() - client.create_rollup_property( + client.create_rollup_property_source_link( request, metadata=[ ("key", "val"), @@ -72514,8 +74444,9 @@ def test_create_rollup_property_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_rollup_property_rest_bad_request( - transport: str = 
"rest", request_type=analytics_admin.CreateRollupPropertyRequest +def test_create_rollup_property_source_link_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.CreateRollupPropertySourceLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72523,7 +74454,7 @@ def test_create_rollup_property_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -72535,10 +74466,75 @@ def test_create_rollup_property_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_rollup_property(request) + client.create_rollup_property_source_link(request) -def test_create_rollup_property_rest_error(): +def test_create_rollup_property_source_link_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.RollupPropertySourceLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + rollup_property_source_link=resources.RollupPropertySourceLink( + name="name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.RollupPropertySourceLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_rollup_property_source_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/rollupPropertySourceLinks" + % client.transport._host, + args[1], + ) + + +def test_create_rollup_property_source_link_rest_flattened_error( + transport: str = "rest", +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_rollup_property_source_link( + analytics_admin.CreateRollupPropertySourceLinkRequest(), + parent="parent_value", + rollup_property_source_link=resources.RollupPropertySourceLink( + name="name_value" + ), + ) + + +def test_create_rollup_property_source_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -72547,11 +74543,11 @@ def test_create_rollup_property_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.GetRollupPropertySourceLinkRequest, + analytics_admin.DeleteRollupPropertySourceLinkRequest, dict, ], ) -def test_get_rollup_property_source_link_rest(request_type): +def test_delete_rollup_property_source_link_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -72564,30 +74560,23 @@ def test_get_rollup_property_source_link_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.RollupPropertySourceLink( - name="name_value", - source_property="source_property_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.RollupPropertySourceLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_rollup_property_source_link(request) + response = client.delete_rollup_property_source_link(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.RollupPropertySourceLink) - assert response.name == "name_value" - assert response.source_property == "source_property_value" + assert response is None -def test_get_rollup_property_source_link_rest_required_fields( - request_type=analytics_admin.GetRollupPropertySourceLinkRequest, +def test_delete_rollup_property_source_link_rest_required_fields( + request_type=analytics_admin.DeleteRollupPropertySourceLinkRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -72607,7 +74596,7 @@ def test_get_rollup_property_source_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_rollup_property_source_link._get_unset_required_fields(jsonified_request) + ).delete_rollup_property_source_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -72616,7 +74605,7 @@ def test_get_rollup_property_source_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_rollup_property_source_link._get_unset_required_fields(jsonified_request) + ).delete_rollup_property_source_link._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -72630,7 +74619,7 @@ def test_get_rollup_property_source_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.RollupPropertySourceLink() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72642,41 +74631,38 @@ def test_get_rollup_property_source_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.RollupPropertySourceLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_rollup_property_source_link(request) + response = client.delete_rollup_property_source_link(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_rollup_property_source_link_rest_unset_required_fields(): +def test_delete_rollup_property_source_link_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_rollup_property_source_link._get_unset_required_fields( - {} + unset_fields = ( + transport.delete_rollup_property_source_link._get_unset_required_fields({}) ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): +def test_delete_rollup_property_source_link_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72690,15 +74676,11 @@ def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): path_template, 
"transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_get_rollup_property_source_link", - ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_get_rollup_property_source_link", + "pre_delete_rollup_property_source_link", ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_admin.GetRollupPropertySourceLinkRequest.pb( - analytics_admin.GetRollupPropertySourceLinkRequest() + pb_message = analytics_admin.DeleteRollupPropertySourceLinkRequest.pb( + analytics_admin.DeleteRollupPropertySourceLinkRequest() ) transcode.return_value = { "method": "post", @@ -72710,19 +74692,15 @@ def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.RollupPropertySourceLink.to_json( - resources.RollupPropertySourceLink() - ) - request = analytics_admin.GetRollupPropertySourceLinkRequest() + request = analytics_admin.DeleteRollupPropertySourceLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.RollupPropertySourceLink() - client.get_rollup_property_source_link( + client.delete_rollup_property_source_link( request, metadata=[ ("key", "val"), @@ -72731,12 +74709,11 @@ def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_rollup_property_source_link_rest_bad_request( +def test_delete_rollup_property_source_link_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.GetRollupPropertySourceLinkRequest, + request_type=analytics_admin.DeleteRollupPropertySourceLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -72756,10 +74733,10 @@ def 
test_get_rollup_property_source_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_rollup_property_source_link(request) + client.delete_rollup_property_source_link(request) -def test_get_rollup_property_source_link_rest_flattened(): +def test_delete_rollup_property_source_link_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -72768,7 +74745,7 @@ def test_get_rollup_property_source_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.RollupPropertySourceLink() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { @@ -72784,13 +74761,11 @@ def test_get_rollup_property_source_link_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.RollupPropertySourceLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_rollup_property_source_link(**mock_args) + client.delete_rollup_property_source_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -72803,7 +74778,9 @@ def test_get_rollup_property_source_link_rest_flattened(): ) -def test_get_rollup_property_source_link_rest_flattened_error(transport: str = "rest"): +def test_delete_rollup_property_source_link_rest_flattened_error( + transport: str = "rest", +): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -72812,13 +74789,13 @@ def test_get_rollup_property_source_link_rest_flattened_error(transport: str = " # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_rollup_property_source_link( - analytics_admin.GetRollupPropertySourceLinkRequest(), + client.delete_rollup_property_source_link( + analytics_admin.DeleteRollupPropertySourceLinkRequest(), name="name_value", ) -def test_get_rollup_property_source_link_rest_error(): +def test_delete_rollup_property_source_link_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -72827,47 +74804,42 @@ def test_get_rollup_property_source_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.ListRollupPropertySourceLinksRequest, + analytics_admin.CreateSubpropertyRequest, dict, ], ) -def test_list_rollup_property_source_links_rest(request_type): +def test_create_subproperty_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.ListRollupPropertySourceLinksResponse( - next_page_token="next_page_token_value", - ) + return_value = analytics_admin.CreateSubpropertyResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListRollupPropertySourceLinksResponse.pb( - return_value - ) + return_value = analytics_admin.CreateSubpropertyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_rollup_property_source_links(request) + response = client.create_subproperty(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRollupPropertySourceLinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_admin.CreateSubpropertyResponse) -def test_list_rollup_property_source_links_rest_required_fields( - request_type=analytics_admin.ListRollupPropertySourceLinksRequest, +def test_create_subproperty_rest_required_fields( + request_type=analytics_admin.CreateSubpropertyRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -72887,7 +74859,7 @@ def test_list_rollup_property_source_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_rollup_property_source_links._get_unset_required_fields(jsonified_request) + ).create_subproperty._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -72896,14 +74868,7 @@ def test_list_rollup_property_source_links_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).list_rollup_property_source_links._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_subproperty._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -72917,7 +74882,7 @@ def test_list_rollup_property_source_links_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + return_value = analytics_admin.CreateSubpropertyResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -72929,51 +74894,48 @@ def test_list_rollup_property_source_links_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.ListRollupPropertySourceLinksResponse.pb( - return_value - ) + return_value = analytics_admin.CreateSubpropertyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_rollup_property_source_links(request) + response = client.create_subproperty(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_rollup_property_source_links_rest_unset_required_fields(): +def 
test_create_subproperty_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.list_rollup_property_source_links._get_unset_required_fields({}) - ) + unset_fields = transport.create_subproperty._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "subproperty", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): +def test_create_subproperty_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -72986,16 +74948,14 @@ def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "post_list_rollup_property_source_links", + transports.AnalyticsAdminServiceRestInterceptor, "post_create_subproperty" ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_list_rollup_property_source_links", + transports.AnalyticsAdminServiceRestInterceptor, "pre_create_subproperty" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.ListRollupPropertySourceLinksRequest.pb( - analytics_admin.ListRollupPropertySourceLinksRequest() + pb_message = analytics_admin.CreateSubpropertyRequest.pb( + analytics_admin.CreateSubpropertyRequest() ) transcode.return_value = { "method": "post", @@ -73007,21 +74967,19 @@ def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - 
analytics_admin.ListRollupPropertySourceLinksResponse.to_json( - analytics_admin.ListRollupPropertySourceLinksResponse() - ) + req.return_value._content = analytics_admin.CreateSubpropertyResponse.to_json( + analytics_admin.CreateSubpropertyResponse() ) - request = analytics_admin.ListRollupPropertySourceLinksRequest() + request = analytics_admin.CreateSubpropertyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + post.return_value = analytics_admin.CreateSubpropertyResponse() - client.list_rollup_property_source_links( + client.create_subproperty( request, metadata=[ ("key", "val"), @@ -73033,9 +74991,8 @@ def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_rollup_property_source_links_rest_bad_request( - transport: str = "rest", - request_type=analytics_admin.ListRollupPropertySourceLinksRequest, +def test_create_subproperty_rest_bad_request( + transport: str = "rest", request_type=analytics_admin.CreateSubpropertyRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -73043,7 +75000,7 @@ def test_list_rollup_property_source_links_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -73055,144 +75012,23 @@ def test_list_rollup_property_source_links_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_rollup_property_source_links(request) - - -def test_list_rollup_property_source_links_rest_flattened(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = analytics_admin.ListRollupPropertySourceLinksResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_admin.ListRollupPropertySourceLinksResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_rollup_property_source_links(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/rollupPropertySourceLinks" - % client.transport._host, - args[1], - ) - - -def test_list_rollup_property_source_links_rest_flattened_error( - transport: str = "rest", -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_rollup_property_source_links( - analytics_admin.ListRollupPropertySourceLinksRequest(), - parent="parent_value", - ) + client.create_subproperty(request) -def test_list_rollup_property_source_links_rest_pager(transport: str = "rest"): +def test_create_subproperty_rest_error(): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - next_page_token="abc", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[], - next_page_token="def", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - ], - next_page_token="ghi", - ), - analytics_admin.ListRollupPropertySourceLinksResponse( - rollup_property_source_links=[ - resources.RollupPropertySourceLink(), - resources.RollupPropertySourceLink(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_admin.ListRollupPropertySourceLinksResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_rollup_property_source_links(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.RollupPropertySourceLink) for i in results) - - pages = list( - client.list_rollup_property_source_links(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateRollupPropertySourceLinkRequest, + analytics_admin.CreateSubpropertyEventFilterRequest, dict, ], ) -def 
test_create_rollup_property_source_link_rest(request_type): +def test_create_subproperty_event_filter_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -73200,17 +75036,35 @@ def test_create_rollup_property_source_link_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["rollup_property_source_link"] = { + request_init["subproperty_event_filter"] = { "name": "name_value", - "source_property": "source_property_value", + "apply_to_property": "apply_to_property_value", + "filter_clauses": [ + { + "filter_clause_type": 1, + "filter_expression": { + "or_group": {"filter_expressions": {}}, + "not_expression": {}, + "filter_condition": { + "null_filter": True, + "string_filter": { + "match_type": 1, + "value": "value_value", + "case_sensitive": True, + }, + "field_name": "field_name_value", + }, + }, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateRollupPropertySourceLinkRequest.meta.fields[ - "rollup_property_source_link" + test_field = analytics_admin.CreateSubpropertyEventFilterRequest.meta.fields[ + "subproperty_event_filter" ] def get_message_fields(field): @@ -73240,7 +75094,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "rollup_property_source_link" + "subproperty_event_filter" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -73271,41 +75125,41 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range( - 0, len(request_init["rollup_property_source_link"][field]) - ): - del request_init["rollup_property_source_link"][field][i][subfield] + for i in range(0, len(request_init["subproperty_event_filter"][field])): + del request_init["subproperty_event_filter"][field][i][subfield] else: - del request_init["rollup_property_source_link"][field][subfield] + del request_init["subproperty_event_filter"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.RollupPropertySourceLink( + return_value = gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value", - source_property="source_property_value", + apply_to_property="apply_to_property_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.RollupPropertySourceLink.pb(return_value) + return_value = gaa_subproperty_event_filter.SubpropertyEventFilter.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_rollup_property_source_link(request) + response = client.create_subproperty_event_filter(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.RollupPropertySourceLink) + assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) assert response.name == "name_value" - assert response.source_property == "source_property_value" + assert response.apply_to_property == "apply_to_property_value" -def test_create_rollup_property_source_link_rest_required_fields( - request_type=analytics_admin.CreateRollupPropertySourceLinkRequest, +def test_create_subproperty_event_filter_rest_required_fields( + request_type=analytics_admin.CreateSubpropertyEventFilterRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -73325,7 +75179,7 @@ def test_create_rollup_property_source_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_rollup_property_source_link._get_unset_required_fields(jsonified_request) + ).create_subproperty_event_filter._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -73334,7 +75188,7 @@ def 
test_create_rollup_property_source_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_rollup_property_source_link._get_unset_required_fields(jsonified_request) + ).create_subproperty_event_filter._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -73348,7 +75202,7 @@ def test_create_rollup_property_source_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.RollupPropertySourceLink() + return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -73370,40 +75224,42 @@ def test_create_rollup_property_source_link_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.RollupPropertySourceLink.pb(return_value) + return_value = gaa_subproperty_event_filter.SubpropertyEventFilter.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_rollup_property_source_link(request) + response = client.create_subproperty_event_filter(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_rollup_property_source_link_rest_unset_required_fields(): +def test_create_subproperty_event_filter_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - 
transport.create_rollup_property_source_link._get_unset_required_fields({}) + unset_fields = transport.create_subproperty_event_filter._get_unset_required_fields( + {} ) assert set(unset_fields) == ( set(()) & set( ( "parent", - "rollupPropertySourceLink", + "subpropertyEventFilter", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): +def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -73417,15 +75273,15 @@ def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_create_rollup_property_source_link", + "post_create_subproperty_event_filter", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_create_rollup_property_source_link", + "pre_create_subproperty_event_filter", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateRollupPropertySourceLinkRequest.pb( - analytics_admin.CreateRollupPropertySourceLinkRequest() + pb_message = analytics_admin.CreateSubpropertyEventFilterRequest.pb( + analytics_admin.CreateSubpropertyEventFilterRequest() ) transcode.return_value = { "method": "post", @@ -73437,19 +75293,21 @@ def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.RollupPropertySourceLink.to_json( - resources.RollupPropertySourceLink() + req.return_value._content = ( + gaa_subproperty_event_filter.SubpropertyEventFilter.to_json( + gaa_subproperty_event_filter.SubpropertyEventFilter() + ) ) - request = 
analytics_admin.CreateRollupPropertySourceLinkRequest() + request = analytics_admin.CreateSubpropertyEventFilterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.RollupPropertySourceLink() + post.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() - client.create_rollup_property_source_link( + client.create_subproperty_event_filter( request, metadata=[ ("key", "val"), @@ -73461,9 +75319,9 @@ def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_rollup_property_source_link_rest_bad_request( +def test_create_subproperty_event_filter_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.CreateRollupPropertySourceLinkRequest, + request_type=analytics_admin.CreateSubpropertyEventFilterRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -73483,10 +75341,10 @@ def test_create_rollup_property_source_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_rollup_property_source_link(request) + client.create_subproperty_event_filter(request) -def test_create_rollup_property_source_link_rest_flattened(): +def test_create_subproperty_event_filter_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -73495,7 +75353,7 @@ def test_create_rollup_property_source_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.RollupPropertySourceLink() + return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -73503,7 +75361,7 @@ def test_create_rollup_property_source_link_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - rollup_property_source_link=resources.RollupPropertySourceLink( + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), ) @@ -73513,27 +75371,27 @@ def test_create_rollup_property_source_link_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.RollupPropertySourceLink.pb(return_value) + return_value = gaa_subproperty_event_filter.SubpropertyEventFilter.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_rollup_property_source_link(**mock_args) + client.create_subproperty_event_filter(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/rollupPropertySourceLinks" + "%s/v1alpha/{parent=properties/*}/subpropertyEventFilters" % client.transport._host, args[1], ) -def test_create_rollup_property_source_link_rest_flattened_error( - transport: str = "rest", -): +def test_create_subproperty_event_filter_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -73542,16 +75400,16 @@ def test_create_rollup_property_source_link_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_rollup_property_source_link( - analytics_admin.CreateRollupPropertySourceLinkRequest(), + client.create_subproperty_event_filter( + analytics_admin.CreateSubpropertyEventFilterRequest(), parent="parent_value", - rollup_property_source_link=resources.RollupPropertySourceLink( + subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), ) -def test_create_rollup_property_source_link_rest_error(): +def test_create_subproperty_event_filter_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -73560,40 +75418,47 @@ def test_create_rollup_property_source_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.DeleteRollupPropertySourceLinkRequest, + analytics_admin.GetSubpropertyEventFilterRequest, dict, ], ) -def test_delete_rollup_property_source_link_rest(request_type): +def test_get_subproperty_event_filter_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/rollupPropertySourceLinks/sample2"} + 
request_init = {"name": "properties/sample1/subpropertyEventFilters/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = subproperty_event_filter.SubpropertyEventFilter( + name="name_value", + apply_to_property="apply_to_property_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = subproperty_event_filter.SubpropertyEventFilter.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_rollup_property_source_link(request) + response = client.get_subproperty_event_filter(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, subproperty_event_filter.SubpropertyEventFilter) + assert response.name == "name_value" + assert response.apply_to_property == "apply_to_property_value" -def test_delete_rollup_property_source_link_rest_required_fields( - request_type=analytics_admin.DeleteRollupPropertySourceLinkRequest, +def test_get_subproperty_event_filter_rest_required_fields( + request_type=analytics_admin.GetSubpropertyEventFilterRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -73613,7 +75478,7 @@ def test_delete_rollup_property_source_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_rollup_property_source_link._get_unset_required_fields(jsonified_request) + ).get_subproperty_event_filter._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -73622,7 +75487,7 @@ def test_delete_rollup_property_source_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_rollup_property_source_link._get_unset_required_fields(jsonified_request) + ).get_subproperty_event_filter._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -73636,7 +75501,7 @@ def test_delete_rollup_property_source_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = subproperty_event_filter.SubpropertyEventFilter() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -73648,38 +75513,41 @@ def test_delete_rollup_property_source_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = subproperty_event_filter.SubpropertyEventFilter.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_rollup_property_source_link(request) + response = client.get_subproperty_event_filter(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_rollup_property_source_link_rest_unset_required_fields(): +def test_get_subproperty_event_filter_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.delete_rollup_property_source_link._get_unset_required_fields({}) - ) + unset_fields = transport.get_subproperty_event_filter._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_rollup_property_source_link_rest_interceptors(null_interceptor): +def test_get_subproperty_event_filter_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -73693,11 +75561,15 @@ def test_delete_rollup_property_source_link_rest_interceptors(null_interceptor): 
path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_delete_rollup_property_source_link", + "post_get_subproperty_event_filter", + ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_subproperty_event_filter", ) as pre: pre.assert_not_called() - pb_message = analytics_admin.DeleteRollupPropertySourceLinkRequest.pb( - analytics_admin.DeleteRollupPropertySourceLinkRequest() + post.assert_not_called() + pb_message = analytics_admin.GetSubpropertyEventFilterRequest.pb( + analytics_admin.GetSubpropertyEventFilterRequest() ) transcode.return_value = { "method": "post", @@ -73709,15 +75581,21 @@ def test_delete_rollup_property_source_link_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = ( + subproperty_event_filter.SubpropertyEventFilter.to_json( + subproperty_event_filter.SubpropertyEventFilter() + ) + ) - request = analytics_admin.DeleteRollupPropertySourceLinkRequest() + request = analytics_admin.GetSubpropertyEventFilterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = subproperty_event_filter.SubpropertyEventFilter() - client.delete_rollup_property_source_link( + client.get_subproperty_event_filter( request, metadata=[ ("key", "val"), @@ -73726,11 +75604,12 @@ def test_delete_rollup_property_source_link_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_rollup_property_source_link_rest_bad_request( +def test_get_subproperty_event_filter_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.DeleteRollupPropertySourceLinkRequest, + request_type=analytics_admin.GetSubpropertyEventFilterRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-73738,7 +75617,7 @@ def test_delete_rollup_property_source_link_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/rollupPropertySourceLinks/sample2"} + request_init = {"name": "properties/sample1/subpropertyEventFilters/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -73750,10 +75629,10 @@ def test_delete_rollup_property_source_link_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_rollup_property_source_link(request) + client.get_subproperty_event_filter(request) -def test_delete_rollup_property_source_link_rest_flattened(): +def test_get_subproperty_event_filter_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -73762,12 +75641,10 @@ def test_delete_rollup_property_source_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = subproperty_event_filter.SubpropertyEventFilter() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "properties/sample1/rollupPropertySourceLinks/sample2" - } + sample_request = {"name": "properties/sample1/subpropertyEventFilters/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -73778,26 +75655,26 @@ def test_delete_rollup_property_source_link_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = subproperty_event_filter.SubpropertyEventFilter.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_rollup_property_source_link(**mock_args) + client.get_subproperty_event_filter(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/rollupPropertySourceLinks/*}" + "%s/v1alpha/{name=properties/*/subpropertyEventFilters/*}" % client.transport._host, args[1], ) -def test_delete_rollup_property_source_link_rest_flattened_error( - transport: str = "rest", -): +def test_get_subproperty_event_filter_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -73806,13 +75683,13 @@ def test_delete_rollup_property_source_link_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_rollup_property_source_link( - analytics_admin.DeleteRollupPropertySourceLinkRequest(), + client.get_subproperty_event_filter( + analytics_admin.GetSubpropertyEventFilterRequest(), name="name_value", ) -def test_delete_rollup_property_source_link_rest_error(): +def test_get_subproperty_event_filter_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -73821,42 +75698,47 @@ def test_delete_rollup_property_source_link_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSubpropertyRequest, + analytics_admin.ListSubpropertyEventFiltersRequest, dict, ], ) -def test_create_subproperty_rest(request_type): +def test_list_subproperty_event_filters_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_admin.CreateSubpropertyResponse() + return_value = analytics_admin.ListSubpropertyEventFiltersResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateSubpropertyResponse.pb(return_value) + return_value = analytics_admin.ListSubpropertyEventFiltersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_subproperty(request) + response = client.list_subproperty_event_filters(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_admin.CreateSubpropertyResponse) + assert isinstance(response, pagers.ListSubpropertyEventFiltersPager) + assert response.next_page_token == "next_page_token_value" -def test_create_subproperty_rest_required_fields( - request_type=analytics_admin.CreateSubpropertyRequest, +def test_list_subproperty_event_filters_rest_required_fields( + request_type=analytics_admin.ListSubpropertyEventFiltersRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport @@ -73876,7 +75758,7 @@ def test_create_subproperty_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_subproperty._get_unset_required_fields(jsonified_request) + ).list_subproperty_event_filters._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -73885,7 +75767,14 @@ def test_create_subproperty_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_subproperty._get_unset_required_fields(jsonified_request) + 
).list_subproperty_event_filters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -73899,7 +75788,7 @@ def test_create_subproperty_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_admin.CreateSubpropertyResponse() + return_value = analytics_admin.ListSubpropertyEventFiltersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -73911,48 +75800,51 @@ def test_create_subproperty_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_admin.CreateSubpropertyResponse.pb(return_value) + return_value = analytics_admin.ListSubpropertyEventFiltersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_subproperty(request) + response = client.list_subproperty_event_filters(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_subproperty_rest_unset_required_fields(): +def test_list_subproperty_event_filters_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_subproperty._get_unset_required_fields({}) + unset_fields = transport.list_subproperty_event_filters._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "subproperty", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_subproperty_rest_interceptors(null_interceptor): +def test_list_subproperty_event_filters_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -73965,14 +75857,16 @@ def test_create_subproperty_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "post_create_subproperty" + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_subproperty_event_filters", ) as post, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, "pre_create_subproperty" + transports.AnalyticsAdminServiceRestInterceptor, + "pre_list_subproperty_event_filters", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateSubpropertyRequest.pb( - analytics_admin.CreateSubpropertyRequest() + pb_message = analytics_admin.ListSubpropertyEventFiltersRequest.pb( + analytics_admin.ListSubpropertyEventFiltersRequest() ) transcode.return_value = { "method": "post", @@ -73984,19 +75878,21 @@ def test_create_subproperty_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_admin.CreateSubpropertyResponse.to_json( - analytics_admin.CreateSubpropertyResponse() + req.return_value._content = ( + 
analytics_admin.ListSubpropertyEventFiltersResponse.to_json( + analytics_admin.ListSubpropertyEventFiltersResponse() + ) ) - request = analytics_admin.CreateSubpropertyRequest() + request = analytics_admin.ListSubpropertyEventFiltersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_admin.CreateSubpropertyResponse() + post.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() - client.create_subproperty( + client.list_subproperty_event_filters( request, metadata=[ ("key", "val"), @@ -74008,210 +75904,9 @@ def test_create_subproperty_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_subproperty_rest_bad_request( - transport: str = "rest", request_type=analytics_admin.CreateSubpropertyRequest -): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_subproperty(request) - - -def test_create_subproperty_rest_error(): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_admin.DeleteSubpropertyEventFilterRequest, - dict, - ], -) -def test_delete_subproperty_event_filter_rest(request_type): - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/subpropertyEventFilters/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_subproperty_event_filter(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_subproperty_event_filter_rest_required_fields( - request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, -): - transport_class = transports.AnalyticsAdminServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_subproperty_event_filter._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_subproperty_event_filter._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_subproperty_event_filter(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_subproperty_event_filter_rest_unset_required_fields(): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_subproperty_event_filter._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_subproperty_event_filter_rest_interceptors(null_interceptor): - transport = transports.AnalyticsAdminServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.AnalyticsAdminServiceRestInterceptor(), - ) - client = AnalyticsAdminServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.AnalyticsAdminServiceRestInterceptor, - "pre_delete_subproperty_event_filter", - ) as pre: - pre.assert_not_called() - pb_message = analytics_admin.DeleteSubpropertyEventFilterRequest.pb( - analytics_admin.DeleteSubpropertyEventFilterRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = analytics_admin.DeleteSubpropertyEventFilterRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_subproperty_event_filter( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_subproperty_event_filter_rest_bad_request( +def test_list_subproperty_event_filters_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, + request_type=analytics_admin.ListSubpropertyEventFiltersRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74219,7 +75914,7 @@ def test_delete_subproperty_event_filter_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/subpropertyEventFilters/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -74231,10 +75926,10 @@ def test_delete_subproperty_event_filter_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_subproperty_event_filter(request) + client.list_subproperty_event_filters(request) -def test_delete_subproperty_event_filter_rest_flattened(): +def test_list_subproperty_event_filters_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -74243,38 +75938,42 @@ def test_delete_subproperty_event_filter_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = analytics_admin.ListSubpropertyEventFiltersResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/subpropertyEventFilters/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = analytics_admin.ListSubpropertyEventFiltersResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_subproperty_event_filter(**mock_args) + client.list_subproperty_event_filters(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/subpropertyEventFilters/*}" + "%s/v1alpha/{parent=properties/*}/subpropertyEventFilters" % client.transport._host, args[1], ) -def test_delete_subproperty_event_filter_rest_flattened_error(transport: str = "rest"): +def test_list_subproperty_event_filters_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -74283,35 +75982,102 @@ def test_delete_subproperty_event_filter_rest_flattened_error(transport: str = " # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_subproperty_event_filter( - analytics_admin.DeleteSubpropertyEventFilterRequest(), - name="name_value", + client.list_subproperty_event_filters( + analytics_admin.ListSubpropertyEventFiltersRequest(), + parent="parent_value", ) -def test_delete_subproperty_event_filter_rest_error(): +def test_list_subproperty_event_filters_rest_pager(transport: str = "rest"): client = AnalyticsAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="abc", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[], + next_page_token="def", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + ], + next_page_token="ghi", + ), + analytics_admin.ListSubpropertyEventFiltersResponse( + subproperty_event_filters=[ + subproperty_event_filter.SubpropertyEventFilter(), + subproperty_event_filter.SubpropertyEventFilter(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_admin.ListSubpropertyEventFiltersResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val 
in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_subproperty_event_filters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, subproperty_event_filter.SubpropertyEventFilter) + for i in results + ) + + pages = list( + client.list_subproperty_event_filters(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_admin.CreateSubpropertyEventFilterRequest, + analytics_admin.UpdateSubpropertyEventFilterRequest, dict, ], ) -def test_create_subproperty_event_filter_rest(request_type): +def test_update_subproperty_event_filter_rest(request_type): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "subproperty_event_filter": { + "name": "properties/sample1/subpropertyEventFilters/sample2" + } + } request_init["subproperty_event_filter"] = { - "name": "name_value", + "name": "properties/sample1/subpropertyEventFilters/sample2", "apply_to_property": "apply_to_property_value", "filter_clauses": [ { @@ -74337,7 +76103,7 @@ def test_create_subproperty_event_filter_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = analytics_admin.CreateSubpropertyEventFilterRequest.meta.fields[ + test_field = analytics_admin.UpdateSubpropertyEventFilterRequest.meta.fields[ "subproperty_event_filter" ] @@ -74424,7 +76190,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value - response = client.create_subproperty_event_filter(request) + response = client.update_subproperty_event_filter(request) # Establish that the response is the type that we expect. assert isinstance(response, gaa_subproperty_event_filter.SubpropertyEventFilter) @@ -74432,13 +76198,12 @@ def get_message_fields(field): assert response.apply_to_property == "apply_to_property_value" -def test_create_subproperty_event_filter_rest_required_fields( - request_type=analytics_admin.CreateSubpropertyEventFilterRequest, +def test_update_subproperty_event_filter_rest_required_fields( + request_type=analytics_admin.UpdateSubpropertyEventFilterRequest, ): transport_class = transports.AnalyticsAdminServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -74453,21 +76218,19 @@ def test_create_subproperty_event_filter_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_subproperty_event_filter._get_unset_required_fields(jsonified_request) + ).update_subproperty_event_filter._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_subproperty_event_filter._get_unset_required_fields(jsonified_request) + ).update_subproperty_event_filter._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74488,7 +76251,7 @@ def test_create_subproperty_event_filter_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -74506,34 +76269,34 @@ def test_create_subproperty_event_filter_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_subproperty_event_filter(request) + response = client.update_subproperty_event_filter(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_subproperty_event_filter_rest_unset_required_fields(): +def test_update_subproperty_event_filter_rest_unset_required_fields(): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_subproperty_event_filter._get_unset_required_fields( + unset_fields = transport.update_subproperty_event_filter._get_unset_required_fields( {} ) assert set(unset_fields) == ( - set(()) + set(("updateMask",)) & set( ( - "parent", "subpropertyEventFilter", + "updateMask", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): +def test_update_subproperty_event_filter_rest_interceptors(null_interceptor): transport = transports.AnalyticsAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -74547,15 
+76310,15 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "post_create_subproperty_event_filter", + "post_update_subproperty_event_filter", ) as post, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, - "pre_create_subproperty_event_filter", + "pre_update_subproperty_event_filter", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_admin.CreateSubpropertyEventFilterRequest.pb( - analytics_admin.CreateSubpropertyEventFilterRequest() + pb_message = analytics_admin.UpdateSubpropertyEventFilterRequest.pb( + analytics_admin.UpdateSubpropertyEventFilterRequest() ) transcode.return_value = { "method": "post", @@ -74573,7 +76336,7 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): ) ) - request = analytics_admin.CreateSubpropertyEventFilterRequest() + request = analytics_admin.UpdateSubpropertyEventFilterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -74581,7 +76344,7 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() - client.create_subproperty_event_filter( + client.update_subproperty_event_filter( request, metadata=[ ("key", "val"), @@ -74593,9 +76356,9 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_subproperty_event_filter_rest_bad_request( +def test_update_subproperty_event_filter_rest_bad_request( transport: str = "rest", - request_type=analytics_admin.CreateSubpropertyEventFilterRequest, + request_type=analytics_admin.UpdateSubpropertyEventFilterRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -74603,7 +76366,11 @@ def 
test_create_subproperty_event_filter_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = { + "subproperty_event_filter": { + "name": "properties/sample1/subpropertyEventFilters/sample2" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -74615,10 +76382,10 @@ def test_create_subproperty_event_filter_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_subproperty_event_filter(request) + client.update_subproperty_event_filter(request) -def test_create_subproperty_event_filter_rest_flattened(): +def test_update_subproperty_event_filter_rest_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -74630,14 +76397,18 @@ def test_create_subproperty_event_filter_rest_flattened(): return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = { + "subproperty_event_filter": { + "name": "properties/sample1/subpropertyEventFilters/sample2" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -74652,20 +76423,20 @@ def test_create_subproperty_event_filter_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_subproperty_event_filter(**mock_args) + client.update_subproperty_event_filter(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/subpropertyEventFilters" + "%s/v1alpha/{subproperty_event_filter.name=properties/*/subpropertyEventFilters/*}" % client.transport._host, args[1], ) -def test_create_subproperty_event_filter_rest_flattened_error(transport: str = "rest"): +def test_update_subproperty_event_filter_rest_flattened_error(transport: str = "rest"): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -74674,16 +76445,273 @@ def test_create_subproperty_event_filter_rest_flattened_error(transport: str = " # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_subproperty_event_filter( - analytics_admin.CreateSubpropertyEventFilterRequest(), - parent="parent_value", + client.update_subproperty_event_filter( + analytics_admin.UpdateSubpropertyEventFilterRequest(), subproperty_event_filter=gaa_subproperty_event_filter.SubpropertyEventFilter( name="name_value" ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_subproperty_event_filter_rest_error(): +def test_update_subproperty_event_filter_rest_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.DeleteSubpropertyEventFilterRequest, + dict, + ], +) +def test_delete_subproperty_event_filter_rest(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/subpropertyEventFilters/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_subproperty_event_filter(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subproperty_event_filter_rest_required_fields( + request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_subproperty_event_filter._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_subproperty_event_filter._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned 
response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_subproperty_event_filter(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_subproperty_event_filter_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_subproperty_event_filter._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_subproperty_event_filter_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_delete_subproperty_event_filter", + ) as pre: + pre.assert_not_called() + pb_message = analytics_admin.DeleteSubpropertyEventFilterRequest.pb( + analytics_admin.DeleteSubpropertyEventFilterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = analytics_admin.DeleteSubpropertyEventFilterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_subproperty_event_filter( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_subproperty_event_filter_rest_bad_request( + transport: str = "rest", + request_type=analytics_admin.DeleteSubpropertyEventFilterRequest, +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/subpropertyEventFilters/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_subproperty_event_filter(request) + + +def test_delete_subproperty_event_filter_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/subpropertyEventFilters/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_subproperty_event_filter(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/subpropertyEventFilters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_subproperty_event_filter_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_subproperty_event_filter( + analytics_admin.DeleteSubpropertyEventFilterRequest(), + name="name_value", + ) + + +def test_delete_subproperty_event_filter_rest_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -74954,8 +76982,11 @@ def test_analytics_admin_service_base_transport(): "create_rollup_property_source_link", "delete_rollup_property_source_link", "create_subproperty", - "delete_subproperty_event_filter", "create_subproperty_event_filter", + "get_subproperty_event_filter", + "list_subproperty_event_filters", + "update_subproperty_event_filter", + "delete_subproperty_event_filter", ) for method in methods: with pytest.raises(NotImplementedError): @@ -75636,12 +77667,21 @@ def test_analytics_admin_service_client_transport_session_collision(transport_na session1 = client1.transport.create_subproperty._session session2 = client2.transport.create_subproperty._session assert session1 != session2 - session1 = client1.transport.delete_subproperty_event_filter._session - session2 = client2.transport.delete_subproperty_event_filter._session - assert session1 != session2 session1 = client1.transport.create_subproperty_event_filter._session session2 = client2.transport.create_subproperty_event_filter._session assert session1 != session2 + session1 = client1.transport.get_subproperty_event_filter._session + session2 = client2.transport.get_subproperty_event_filter._session + assert session1 != session2 + session1 = client1.transport.list_subproperty_event_filters._session + session2 = client2.transport.list_subproperty_event_filters._session + assert session1 != session2 + session1 = client1.transport.update_subproperty_event_filter._session + session2 = client2.transport.update_subproperty_event_filter._session + assert session1 != session2 + session1 = client1.transport.delete_subproperty_event_filter._session + session2 = 
client2.transport.delete_subproperty_event_filter._session + assert session1 != session2 def test_analytics_admin_service_grpc_transport_channel(): diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index ece5527160bc..1ed04a422c3e 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.18.3](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.2...google-analytics-data-v0.18.3) (2024-01-24) + + +### Features + +* add the `webhook_notification` field to the `AudienceList` resource ([29e65f8](https://github.com/googleapis/google-cloud-python/commit/29e65f8f6e32636e934bd494f15448656f0ce7d7)) +* add the `webhook_notification` field to the `RecurringAudienceList` resource ([29e65f8](https://github.com/googleapis/google-cloud-python/commit/29e65f8f6e32636e934bd494f15448656f0ce7d7)) +* add the `WebhookNotification` type ([29e65f8](https://github.com/googleapis/google-cloud-python/commit/29e65f8f6e32636e934bd494f15448656f0ce7d7)) + + +### Documentation + +* announce that `ListAudienceLists`, `GetAudienceList`, `QueryAudienceList`, `CreateAudienceList` methods are now available in the v1beta version of the Data API ([29e65f8](https://github.com/googleapis/google-cloud-python/commit/29e65f8f6e32636e934bd494f15448656f0ce7d7)) + +## [0.18.2](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.1...google-analytics-data-v0.18.2) (2023-12-09) + + +### Features + +* [google-analytics-data] add `CreateAudienceExport`, `QueryAudienceExport`, `GetAudienceExport`, `ListAudienceExports` methods to the Data API v1 beta ([182c4cf](https://github.com/googleapis/google-cloud-python/commit/182c4cf16e7e1eef2819396a5a0b590a81af6a58)) +* add `sampling_metadatas` field to `ResponseMetaData` 
([182c4cf](https://github.com/googleapis/google-cloud-python/commit/182c4cf16e7e1eef2819396a5a0b590a81af6a58)) +* add `SamplingMetadata`, `AudienceExport`, `AudienceExportMetadata`, `AudienceDimensionValue` types ([182c4cf](https://github.com/googleapis/google-cloud-python/commit/182c4cf16e7e1eef2819396a5a0b590a81af6a58)) + + +### Bug Fixes + +* add `optional` label to `consumed`, `remaining` fields of the `QuotaStatus` type ([182c4cf](https://github.com/googleapis/google-cloud-python/commit/182c4cf16e7e1eef2819396a5a0b590a81af6a58)) + + +### Documentation + +* updated comments ([182c4cf](https://github.com/googleapis/google-cloud-python/commit/182c4cf16e7e1eef2819396a5a0b590a81af6a58)) + ## [0.18.1](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.0...google-analytics-data-v0.18.1) (2023-12-07) diff --git a/packages/google-analytics-data/docs/data_v1beta/beta_analytics_data.rst b/packages/google-analytics-data/docs/data_v1beta/beta_analytics_data.rst index 7bcaa51b6b5e..b5c0de656c3f 100644 --- a/packages/google-analytics-data/docs/data_v1beta/beta_analytics_data.rst +++ b/packages/google-analytics-data/docs/data_v1beta/beta_analytics_data.rst @@ -4,3 +4,7 @@ BetaAnalyticsData .. automodule:: google.analytics.data_v1beta.services.beta_analytics_data :members: :inherited-members: + +.. 
automodule:: google.analytics.data_v1beta.services.beta_analytics_data.pagers + :members: + :inherited-members: diff --git a/packages/google-analytics-data/google/analytics/data/__init__.py b/packages/google-analytics-data/google/analytics/data/__init__.py index f86a037d1df2..6415f667e119 100644 --- a/packages/google-analytics-data/google/analytics/data/__init__.py +++ b/packages/google-analytics-data/google/analytics/data/__init__.py @@ -25,14 +25,25 @@ BetaAnalyticsDataClient, ) from google.analytics.data_v1beta.types.analytics_data_api import ( + AudienceDimension, + AudienceDimensionValue, + AudienceExport, + AudienceExportMetadata, + AudienceRow, BatchRunPivotReportsRequest, BatchRunPivotReportsResponse, BatchRunReportsRequest, BatchRunReportsResponse, CheckCompatibilityRequest, CheckCompatibilityResponse, + CreateAudienceExportRequest, + GetAudienceExportRequest, GetMetadataRequest, + ListAudienceExportsRequest, + ListAudienceExportsResponse, Metadata, + QueryAudienceExportRequest, + QueryAudienceExportResponse, RunPivotReportRequest, RunPivotReportResponse, RunRealtimeReportRequest, @@ -74,19 +85,31 @@ ResponseMetaData, RestrictedMetricType, Row, + SamplingMetadata, ) __all__ = ( "BetaAnalyticsDataClient", "BetaAnalyticsDataAsyncClient", + "AudienceDimension", + "AudienceDimensionValue", + "AudienceExport", + "AudienceExportMetadata", + "AudienceRow", "BatchRunPivotReportsRequest", "BatchRunPivotReportsResponse", "BatchRunReportsRequest", "BatchRunReportsResponse", "CheckCompatibilityRequest", "CheckCompatibilityResponse", + "CreateAudienceExportRequest", + "GetAudienceExportRequest", "GetMetadataRequest", + "ListAudienceExportsRequest", + "ListAudienceExportsResponse", "Metadata", + "QueryAudienceExportRequest", + "QueryAudienceExportResponse", "RunPivotReportRequest", "RunPivotReportResponse", "RunRealtimeReportRequest", @@ -122,6 +145,7 @@ "QuotaStatus", "ResponseMetaData", "Row", + "SamplingMetadata", "Compatibility", "MetricAggregation", "MetricType", 
diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index 10aeefebafee..60de74a4be61 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.1" # {x-release-please-version} +__version__ = "0.18.3" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py index d18361882523..33f9d7d70b99 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py @@ -43,6 +43,7 @@ RunFunnelReportResponse, SheetExportAudienceListRequest, SheetExportAudienceListResponse, + WebhookNotification, ) from .types.data import ( BetweenFilter, @@ -196,4 +197,5 @@ "UserSegmentExclusion", "UserSegmentSequenceGroup", "UserSequenceStep", + "WebhookNotification", ) diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index 10aeefebafee..60de74a4be61 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.18.1" # {x-release-please-version} +__version__ = "0.18.3" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index a22498aba2da..39a863714d7d 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -44,6 +44,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.analytics.data_v1alpha.services.alpha_analytics_data import pagers @@ -359,9 +360,9 @@ async def create_audience_list( see https://support.google.com/analytics/answer/9267572. Audience lists contain the users in each audience. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -509,9 +510,9 @@ async def query_audience_list( in the ways that are important to your business. To learn more, see https://support.google.com/analytics/answer/9267572. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.query `__. 
+ To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -748,9 +749,9 @@ async def get_audience_list( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -869,9 +870,9 @@ async def list_audience_lists( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 6b85ebe12f2f..0680e9fc552c 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -48,6 +48,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.analytics.data_v1alpha.services.alpha_analytics_data import pagers @@ -597,9 +598,9 @@ def create_audience_list( see https://support.google.com/analytics/answer/9267572. 
Audience lists contain the users in each audience. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -747,9 +748,9 @@ def query_audience_list( in the ways that are important to your business. To learn more, see https://support.google.com/analytics/answer/9267572. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -988,9 +989,9 @@ def get_audience_list( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -1109,9 +1110,9 @@ def list_audience_lists( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. 
diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index 51099b54aa49..175f6f75fbdb 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -319,9 +319,9 @@ def create_audience_list( see https://support.google.com/analytics/answer/9267572. Audience lists contain the users in each audience. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -366,9 +366,9 @@ def query_audience_list( in the ways that are important to your business. To learn more, see https://support.google.com/analytics/answer/9267572. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -454,9 +454,9 @@ def get_audience_list( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.get `__. 
+ To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -497,9 +497,9 @@ def list_audience_lists( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py index cc690de74c26..54f893cd7c3e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -325,9 +325,9 @@ def create_audience_list( see https://support.google.com/analytics/answer/9267572. Audience lists contain the users in each audience. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.create `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -372,9 +372,9 @@ def query_audience_list( in the ways that are important to your business. To learn more, see https://support.google.com/analytics/answer/9267572. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. 
To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.query `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -461,9 +461,9 @@ def get_audience_list( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.get `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. @@ -504,9 +504,9 @@ def list_audience_lists( List `__ for an introduction to Audience Lists with examples. - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google + This method is available at beta stability at + `audienceExports.list `__. + To give your feedback on this API, complete the `Google Analytics Audience Export API Feedback `__ form. 
diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py index ce1bfdc45137..186444acba19 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py @@ -34,6 +34,7 @@ RunFunnelReportResponse, SheetExportAudienceListRequest, SheetExportAudienceListResponse, + WebhookNotification, ) from .data import ( BetweenFilter, @@ -122,6 +123,7 @@ "RunFunnelReportResponse", "SheetExportAudienceListRequest", "SheetExportAudienceListResponse", + "WebhookNotification", "BetweenFilter", "DateRange", "Dimension", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index 4047913901c5..25f568adb78c 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -27,6 +27,7 @@ manifest={ "CreateRecurringAudienceListRequest", "RecurringAudienceList", + "WebhookNotification", "GetRecurringAudienceListRequest", "ListRecurringAudienceListsRequest", "ListRecurringAudienceListsResponse", @@ -129,6 +130,27 @@ class RecurringAudienceList(proto.Message): This list is ordered with the most recently created audience list first. + webhook_notification (google.analytics.data_v1alpha.types.WebhookNotification): + Optional. Configures webhook notifications to + be sent from the Google Analytics Data API to + your webhook server. Use of webhooks is + optional. If unused, you'll need to poll this + API to determine when a recurring audience list + creates new audience lists. Webhooks allow a + notification to be sent to your servers & avoid + the need for polling. 
+ + Two POST requests will be sent each time a + recurring audience list creates an audience + list. This happens once per day until a + recurring audience list reaches 0 active days + remaining. The first request will be sent + showing a newly created audience list in its + CREATING state. The second request will be sent + after the audience list completes creation + (either the ACTIVE or FAILED state). + + This field is a member of `oneof`_ ``_webhook_notification``. """ name: str = proto.Field( @@ -157,6 +179,95 @@ class RecurringAudienceList(proto.Message): proto.STRING, number=6, ) + webhook_notification: "WebhookNotification" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="WebhookNotification", + ) + + +class WebhookNotification(proto.Message): + r"""Configures a long-running operation resource to send a + webhook notification from the Google Analytics Data API to your + webhook server when the resource updates. + + Notification configurations contain private values & are only + visible to your GCP project. Different GCP projects may attach + different webhook notifications to the same long-running + operation resource. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + Optional. The web address that will receive the webhook + notification. This address will receive POST requests as the + state of the long running operation resource changes. The + POST request will contain both a JSON version of the long + running operation resource in the body and a + ``sentTimestamp`` field. The sent timestamp will specify the + unix microseconds since the epoch that the request was sent; + this lets you identify replayed notifications. + + An example URI is + ``https://us-central1-example-project-id.cloudfunctions.net/example-function-1``. + + The URI must use HTTPS and point to a site with a valid SSL + certificate on the web server. 
The URI must have a maximum + string length of 128 characters & use only the allowlisted + characters from `RFC + 1738 `__. + + When your webhook server receives a notification, it is + expected to reply with an HTTP response status code of 200 + within 5 seconds. + + A URI is required to use webhook notifications. + + Requests to this webhook server will contain an ID token + authenticating the service account + ``google-analytics-audience-export@system.gserviceaccount.com``. + To learn more about ID tokens, see + https://cloud.google.com/docs/authentication/token-types#id. + For Google Cloud Functions, this lets you configure your + function to require authentication. In Cloud IAM, you will + need to grant the service account permissions to the Cloud + Run Invoker (``roles/run.invoker``) & Cloud Functions + Invoker (``roles/cloudfunctions.invoker``) roles for the + webhook post request to pass Google Cloud Functions + authentication. This API can send webhook notifications to + arbitrary URIs; for webhook servers other than Google Cloud + Functions, this ID token in the authorization bearer header + should be ignored if it is not needed. + + This field is a member of `oneof`_ ``_uri``. + channel_token (str): + Optional. The channel token is an arbitrary string value and + must have a maximum string length of 64 characters. Channel + tokens allow you to verify the source of a webhook + notification. This guards against the message being spoofed. + The channel token will be specified in the + ``X-Goog-Channel-Token`` HTTP header of the webhook POST + request. + + A channel token is not required to use webhook + notifications. + + This field is a member of `oneof`_ ``_channel_token``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + channel_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) class GetRecurringAudienceListRequest(proto.Message): @@ -427,6 +538,33 @@ class AudienceList(proto.Message): and this field will be blank. This field is a member of `oneof`_ ``_recurring_audience_list``. + webhook_notification (google.analytics.data_v1alpha.types.WebhookNotification): + Optional. Configures webhook notifications to + be sent from the Google Analytics Data API to + your webhook server. Use of webhooks is + optional. If unused, you'll need to poll this + API to determine when an audience list is ready + to be used. Webhooks allow a notification to be + sent to your servers & avoid the need for + polling. + + Either one or two POST requests will be sent to + the webhook. The first POST request will be sent + immediately showing the newly created audience + list in its CREATING state. The second POST + request will be sent after the audience list + completes creation (either the ACTIVE or FAILED + state). + + If identical audience lists are requested in + quick succession, the second & subsequent + audience lists can be served from cache. In that + case, the audience list create method can return + an audience list is already ACTIVE. In this + scenario, only one POST request will be sent to + the webhook. + + This field is a member of `oneof`_ ``_webhook_notification``. 
""" class State(proto.Enum): @@ -508,6 +646,12 @@ class State(proto.Enum): number=12, optional=True, ) + webhook_notification: "WebhookNotification" = proto.Field( + proto.MESSAGE, + number=13, + optional=True, + message="WebhookNotification", + ) class AudienceListMetadata(proto.Message): diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py b/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py index 8ace2e93547f..037f237e3ad6 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/__init__.py @@ -23,14 +23,25 @@ BetaAnalyticsDataClient, ) from .types.analytics_data_api import ( + AudienceDimension, + AudienceDimensionValue, + AudienceExport, + AudienceExportMetadata, + AudienceRow, BatchRunPivotReportsRequest, BatchRunPivotReportsResponse, BatchRunReportsRequest, BatchRunReportsResponse, CheckCompatibilityRequest, CheckCompatibilityResponse, + CreateAudienceExportRequest, + GetAudienceExportRequest, GetMetadataRequest, + ListAudienceExportsRequest, + ListAudienceExportsResponse, Metadata, + QueryAudienceExportRequest, + QueryAudienceExportResponse, RunPivotReportRequest, RunPivotReportResponse, RunRealtimeReportRequest, @@ -72,10 +83,16 @@ ResponseMetaData, RestrictedMetricType, Row, + SamplingMetadata, ) __all__ = ( "BetaAnalyticsDataAsyncClient", + "AudienceDimension", + "AudienceDimensionValue", + "AudienceExport", + "AudienceExportMetadata", + "AudienceRow", "BatchRunPivotReportsRequest", "BatchRunPivotReportsResponse", "BatchRunReportsRequest", @@ -88,6 +105,7 @@ "CohortSpec", "CohortsRange", "Compatibility", + "CreateAudienceExportRequest", "DateRange", "Dimension", "DimensionCompatibility", @@ -98,7 +116,10 @@ "Filter", "FilterExpression", "FilterExpressionList", + "GetAudienceExportRequest", "GetMetadataRequest", + "ListAudienceExportsRequest", + "ListAudienceExportsResponse", "Metadata", "Metric", 
"MetricAggregation", @@ -114,6 +135,8 @@ "PivotDimensionHeader", "PivotHeader", "PropertyQuota", + "QueryAudienceExportRequest", + "QueryAudienceExportResponse", "QuotaStatus", "ResponseMetaData", "RestrictedMetricType", @@ -124,4 +147,5 @@ "RunRealtimeReportResponse", "RunReportRequest", "RunReportResponse", + "SamplingMetadata", ) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_metadata.json b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_metadata.json index e1e3750c4252..a00d9bb919b6 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_metadata.json +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_metadata.json @@ -25,11 +25,31 @@ "check_compatibility" ] }, + "CreateAudienceExport": { + "methods": [ + "create_audience_export" + ] + }, + "GetAudienceExport": { + "methods": [ + "get_audience_export" + ] + }, "GetMetadata": { "methods": [ "get_metadata" ] }, + "ListAudienceExports": { + "methods": [ + "list_audience_exports" + ] + }, + "QueryAudienceExport": { + "methods": [ + "query_audience_export" + ] + }, "RunPivotReport": { "methods": [ "run_pivot_report" @@ -65,11 +85,31 @@ "check_compatibility" ] }, + "CreateAudienceExport": { + "methods": [ + "create_audience_export" + ] + }, + "GetAudienceExport": { + "methods": [ + "get_audience_export" + ] + }, "GetMetadata": { "methods": [ "get_metadata" ] }, + "ListAudienceExports": { + "methods": [ + "list_audience_exports" + ] + }, + "QueryAudienceExport": { + "methods": [ + "query_audience_export" + ] + }, "RunPivotReport": { "methods": [ "run_pivot_report" @@ -105,11 +145,31 @@ "check_compatibility" ] }, + "CreateAudienceExport": { + "methods": [ + "create_audience_export" + ] + }, + "GetAudienceExport": { + "methods": [ + "get_audience_export" + ] + }, "GetMetadata": { "methods": [ "get_metadata" ] }, + "ListAudienceExports": { + "methods": [ + "list_audience_exports" + ] + }, + "QueryAudienceExport": { + "methods": 
[ + "query_audience_export" + ] + }, "RunPivotReport": { "methods": [ "run_pivot_report" diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index 10aeefebafee..60de74a4be61 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.1" # {x-release-please-version} +__version__ = "0.18.3" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py index 704b9e3ecad7..0d923673401f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py @@ -42,6 +42,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.analytics.data_v1beta.services.beta_analytics_data import pagers from google.analytics.data_v1beta.types import analytics_data_api, data from .client import BetaAnalyticsDataClient @@ -57,6 +63,10 @@ class BetaAnalyticsDataAsyncClient: DEFAULT_ENDPOINT = BetaAnalyticsDataClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT + audience_export_path = 
staticmethod(BetaAnalyticsDataClient.audience_export_path) + parse_audience_export_path = staticmethod( + BetaAnalyticsDataClient.parse_audience_export_path + ) metadata_path = staticmethod(BetaAnalyticsDataClient.metadata_path) parse_metadata_path = staticmethod(BetaAnalyticsDataClient.parse_metadata_path) common_billing_account_path = staticmethod( @@ -626,8 +636,9 @@ async def sample_get_metadata(): Returns: google.analytics.data_v1beta.types.Metadata: - The dimensions and metrics currently - accepted in reporting methods. + The dimensions, metrics and + comparisons currently accepted in + reporting methods. """ # Create or coerce a protobuf request object. @@ -859,6 +870,544 @@ async def sample_check_compatibility(): # Done; return the response. return response + async def create_audience_export( + self, + request: Optional[ + Union[analytics_data_api.CreateAudienceExportRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + audience_export: Optional[analytics_data_api.AudienceExport] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. 
+ + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.CreateAudienceExportRequest, dict]]): + The request object. A request to create a new audience + export. + parent (:class:`str`): + Required. The parent resource where this audience export + will be created. 
Format: ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience_export (:class:`google.analytics.data_v1beta.types.AudienceExport`): + Required. The audience export to + create. + + This corresponds to the ``audience_export`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.analytics.data_v1beta.types.AudienceExport` An audience export is a list of users in an audience at the time of the + list's creation. One audience may have multiple + audience exports created for different days. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, audience_export]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_data_api.CreateAudienceExportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if audience_export is not None: + request.audience_export = audience_export + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_audience_export, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analytics_data_api.AudienceExport, + metadata_type=analytics_data_api.AudienceExportMetadata, + ) + + # Done; return the response. + return response + + async def query_audience_export( + self, + request: Optional[ + Union[analytics_data_api.QueryAudienceExportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceExportResponse: + r"""Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. 
To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.query_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.QueryAudienceExportRequest, dict]]): + The request object. A request to list users in an + audience export. + name (:class:`str`): + Required. The name of the audience export to retrieve + users from. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.QueryAudienceExportResponse: + A list of users in an audience + export. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_data_api.QueryAudienceExportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_audience_export, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_audience_export( + self, + request: Optional[ + Union[analytics_data_api.GetAudienceExportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceExport: + r"""Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.get_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.GetAudienceExportRequest, dict]]): + The request object. A request to retrieve configuration + metadata about a specific audience + export. + name (:class:`str`): + Required. The audience export resource name. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.AudienceExport: + An audience export is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience exports created for different + days. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_data_api.GetAudienceExportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_audience_export, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_audience_exports( + self, + request: Optional[ + Union[analytics_data_api.ListAudienceExportsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAudienceExportsAsyncPager: + r"""Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. 
The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + async def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1beta.types.ListAudienceExportsRequest, dict]]): + The request object. A request to list all audience + exports for a property. + parent (:class:`str`): + Required. All audience exports for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsAsyncPager: + A list of all audience exports for a + property. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analytics_data_api.ListAudienceExportsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_audience_exports, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAudienceExportsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "BetaAnalyticsDataAsyncClient": return self diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py index b0659c09a374..64d8e9aa9fd8 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -46,6 +46,12 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.analytics.data_v1beta.services.beta_analytics_data import pagers from google.analytics.data_v1beta.types import analytics_data_api, data from .transports.base import DEFAULT_CLIENT_INFO, BetaAnalyticsDataTransport @@ -176,6 +182,26 @@ def transport(self) -> BetaAnalyticsDataTransport: """ return self._transport + @staticmethod + def audience_export_path( + property: str, + audience_export: str, + ) -> str: + """Returns a fully-qualified audience_export string.""" + return "properties/{property}/audienceExports/{audience_export}".format( + property=property, + audience_export=audience_export, + ) + + @staticmethod + def parse_audience_export_path(path: str) -> Dict[str, str]: + """Parses a audience_export path into its component segments.""" + m = re.match( + r"^properties/(?P.+?)/audienceExports/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def metadata_path( property: str, @@ -852,8 +878,9 @@ def sample_get_metadata(): Returns: google.analytics.data_v1beta.types.Metadata: - The dimensions and metrics currently - accepted in 
reporting methods. + The dimensions, metrics and + comparisons currently accepted in + reporting methods. """ # Create or coerce a protobuf request object. @@ -1087,6 +1114,544 @@ def sample_check_compatibility(): # Done; return the response. return response + def create_audience_export( + self, + request: Optional[ + Union[analytics_data_api.CreateAudienceExportRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + audience_export: Optional[analytics_data_api.AudienceExport] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.CreateAudienceExportRequest, dict]): + The request object. A request to create a new audience + export. + parent (str): + Required. The parent resource where this audience export + will be created. Format: ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience_export (google.analytics.data_v1beta.types.AudienceExport): + Required. The audience export to + create. + + This corresponds to the ``audience_export`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.analytics.data_v1beta.types.AudienceExport` An audience export is a list of users in an audience at the time of the + list's creation. One audience may have multiple + audience exports created for different days. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, audience_export]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_data_api.CreateAudienceExportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_data_api.CreateAudienceExportRequest): + request = analytics_data_api.CreateAudienceExportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if audience_export is not None: + request.audience_export = audience_export + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + analytics_data_api.AudienceExport, + metadata_type=analytics_data_api.AudienceExportMetadata, + ) + + # Done; return the response. + return response + + def query_audience_export( + self, + request: Optional[ + Union[analytics_data_api.QueryAudienceExportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceExportResponse: + r"""Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.query_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.QueryAudienceExportRequest, dict]): + The request object. A request to list users in an + audience export. + name (str): + Required. The name of the audience export to retrieve + users from. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.QueryAudienceExportResponse: + A list of users in an audience + export. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_data_api.QueryAudienceExportRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_data_api.QueryAudienceExportRequest): + request = analytics_data_api.QueryAudienceExportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_audience_export( + self, + request: Optional[ + Union[analytics_data_api.GetAudienceExportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceExport: + r"""Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.get_audience_export(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.GetAudienceExportRequest, dict]): + The request object. A request to retrieve configuration + metadata about a specific audience + export. + name (str): + Required. The audience export resource name. Format: + ``properties/{property}/audienceExports/{audience_export}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1beta.types.AudienceExport: + An audience export is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience exports created for different + days. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_data_api.GetAudienceExportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_data_api.GetAudienceExportRequest): + request = analytics_data_api.GetAudienceExportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_audience_export] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_audience_exports( + self, + request: Optional[ + Union[analytics_data_api.ListAudienceExportsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAudienceExportsPager: + r"""Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. 
+ + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1beta + + def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.analytics.data_v1beta.types.ListAudienceExportsRequest, dict]): + The request object. A request to list all audience + exports for a property. + parent (str): + Required. All audience exports for this property will be + listed in the response. Format: + ``properties/{property}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsPager: + A list of all audience exports for a + property. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analytics_data_api.ListAudienceExportsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analytics_data_api.ListAudienceExportsRequest): + request = analytics_data_api.ListAudienceExportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_audience_exports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAudienceExportsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "BetaAnalyticsDataClient": return self diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py new file mode 100644 index 000000000000..1c7a8807e581 --- /dev/null +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.analytics.data_v1beta.types import analytics_data_api + + +class ListAudienceExportsPager: + """A pager for iterating through ``list_audience_exports`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``audience_exports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAudienceExports`` requests and continue to iterate + through the ``audience_exports`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., analytics_data_api.ListAudienceExportsResponse], + request: analytics_data_api.ListAudienceExportsRequest, + response: analytics_data_api.ListAudienceExportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1beta.types.ListAudienceExportsRequest): + The initial request object. + response (google.analytics.data_v1beta.types.ListAudienceExportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analytics_data_api.ListAudienceExportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analytics_data_api.ListAudienceExportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analytics_data_api.AudienceExport]: + for page in self.pages: + yield from page.audience_exports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAudienceExportsAsyncPager: + """A pager for iterating through ``list_audience_exports`` requests. + + This class thinly wraps an initial + :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``audience_exports`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListAudienceExports`` requests and continue to iterate + through the ``audience_exports`` field on the + corresponding responses. + + All the usual :class:`google.analytics.data_v1beta.types.ListAudienceExportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[analytics_data_api.ListAudienceExportsResponse] + ], + request: analytics_data_api.ListAudienceExportsRequest, + response: analytics_data_api.ListAudienceExportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.analytics.data_v1beta.types.ListAudienceExportsRequest): + The initial request object. + response (google.analytics.data_v1beta.types.ListAudienceExportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analytics_data_api.ListAudienceExportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[analytics_data_api.ListAudienceExportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[analytics_data_api.AudienceExport]: + async def async_generator(): + async for page in self.pages: + for response in page.audience_exports: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py index 7a50afa7d266..9f76caeeca5a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py @@ -18,10 +18,11 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.analytics.data_v1beta import gapic_version as package_version @@ -160,6 +161,26 @@ def _prep_wrapped_messages(self, 
client_info): default_timeout=60.0, client_info=client_info, ), + self.create_audience_export: gapic_v1.method.wrap_method( + self.create_audience_export, + default_timeout=None, + client_info=client_info, + ), + self.query_audience_export: gapic_v1.method.wrap_method( + self.query_audience_export, + default_timeout=None, + client_info=client_info, + ), + self.get_audience_export: gapic_v1.method.wrap_method( + self.get_audience_export, + default_timeout=None, + client_info=client_info, + ), + self.list_audience_exports: gapic_v1.method.wrap_method( + self.list_audience_exports, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -171,6 +192,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def run_report( self, @@ -252,6 +278,51 @@ def check_compatibility( ]: raise NotImplementedError() + @property + def create_audience_export( + self, + ) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def query_audience_export( + self, + ) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + Union[ + analytics_data_api.QueryAudienceExportResponse, + Awaitable[analytics_data_api.QueryAudienceExportResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_audience_export( + self, + ) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], + Union[ + analytics_data_api.AudienceExport, + Awaitable[analytics_data_api.AudienceExport], + ], + ]: + raise NotImplementedError() + + @property + def list_audience_exports( + self, + ) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + Union[ + analytics_data_api.ListAudienceExportsResponse, + Awaitable[analytics_data_api.ListAudienceExportsResponse], + ], + ]: + raise 
NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py index dcf17bd1e155..4237b8da0190 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py @@ -16,10 +16,11 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.analytics.data_v1beta.types import analytics_data_api @@ -110,6 +111,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -228,6 +230,20 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. 
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def run_report( self, @@ -476,6 +492,194 @@ def check_compatibility( ) return self._stubs["check_compatibility"] + @property + def create_audience_export( + self, + ) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create audience export method over gRPC. + + Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.CreateAudienceExportRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_audience_export" not in self._stubs: + self._stubs["create_audience_export"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/CreateAudienceExport", + request_serializer=analytics_data_api.CreateAudienceExportRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_audience_export"] + + @property + def query_audience_export( + self, + ) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + analytics_data_api.QueryAudienceExportResponse, + ]: + r"""Return a callable for the query audience export method over gRPC. + + Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.QueryAudienceExportRequest], + ~.QueryAudienceExportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_audience_export" not in self._stubs: + self._stubs["query_audience_export"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/QueryAudienceExport", + request_serializer=analytics_data_api.QueryAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.QueryAudienceExportResponse.deserialize, + ) + return self._stubs["query_audience_export"] + + @property + def get_audience_export( + self, + ) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], analytics_data_api.AudienceExport + ]: + r"""Return a callable for the get audience export method over gRPC. + + Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.GetAudienceExportRequest], + ~.AudienceExport]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_audience_export" not in self._stubs: + self._stubs["get_audience_export"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/GetAudienceExport", + request_serializer=analytics_data_api.GetAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.AudienceExport.deserialize, + ) + return self._stubs["get_audience_export"] + + @property + def list_audience_exports( + self, + ) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + analytics_data_api.ListAudienceExportsResponse, + ]: + r"""Return a callable for the list audience exports method over gRPC. + + Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.ListAudienceExportsRequest], + ~.ListAudienceExportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_audience_exports" not in self._stubs: + self._stubs["list_audience_exports"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/ListAudienceExports", + request_serializer=analytics_data_api.ListAudienceExportsRequest.serialize, + response_deserializer=analytics_data_api.ListAudienceExportsResponse.deserialize, + ) + return self._stubs["list_audience_exports"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py index 06a8e2206fde..5ded3483f517 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py @@ -16,9 +16,10 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -156,6 +157,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -231,6 +233,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. 
return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def run_report( self, @@ -482,6 +500,196 @@ def check_compatibility( ) return self._stubs["check_compatibility"] + @property + def create_audience_export( + self, + ) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create audience export method over gRPC. + + Creates an audience export for later retrieval. This method + quickly returns the audience export's resource name and + initiates a long running asynchronous request to form an + audience export. To export the users in an audience export, + first create the audience export through this method and then + send the audience resource name to the ``QueryAudienceExport`` + method. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + An audience export is a snapshot of the users currently in the + audience at the time of audience export creation. Creating + audience exports for one audience on different days will return + different results as users enter and exit the audience. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + Audience exports contain the users in each audience. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. 
The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.CreateAudienceExportRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_audience_export" not in self._stubs: + self._stubs["create_audience_export"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/CreateAudienceExport", + request_serializer=analytics_data_api.CreateAudienceExportRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_audience_export"] + + @property + def query_audience_export( + self, + ) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + Awaitable[analytics_data_api.QueryAudienceExportResponse], + ]: + r"""Return a callable for the query audience export method over gRPC. + + Retrieves an audience export of users. After creating an + audience, the users are not immediately available for exporting. + First, a request to ``CreateAudienceExport`` is necessary to + create an audience export of users, and then second, this method + is used to retrieve the users in the audience export. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audiences in Google Analytics 4 allow you to segment your users + in the ways that are important to your business. To learn more, + see https://support.google.com/analytics/answer/9267572. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. 
To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.QueryAudienceExportRequest], + Awaitable[~.QueryAudienceExportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_audience_export" not in self._stubs: + self._stubs["query_audience_export"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/QueryAudienceExport", + request_serializer=analytics_data_api.QueryAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.QueryAudienceExportResponse.deserialize, + ) + return self._stubs["query_audience_export"] + + @property + def get_audience_export( + self, + ) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], + Awaitable[analytics_data_api.AudienceExport], + ]: + r"""Return a callable for the get audience export method over gRPC. + + Gets configuration metadata about a specific audience export. + This method can be used to understand an audience export after + it has been created. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.GetAudienceExportRequest], + Awaitable[~.AudienceExport]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_audience_export" not in self._stubs: + self._stubs["get_audience_export"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/GetAudienceExport", + request_serializer=analytics_data_api.GetAudienceExportRequest.serialize, + response_deserializer=analytics_data_api.AudienceExport.deserialize, + ) + return self._stubs["get_audience_export"] + + @property + def list_audience_exports( + self, + ) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + Awaitable[analytics_data_api.ListAudienceExportsResponse], + ]: + r"""Return a callable for the list audience exports method over gRPC. + + Lists all audience exports for a property. This method can be + used for you to find and reuse existing audience exports rather + than creating unnecessary new audience exports. The same + audience can have multiple audience exports that represent the + export of users that were in an audience on different days. + + See `Creating an Audience + Export `__ + for an introduction to Audience Exports with examples. + + Audience Export APIs have some methods at alpha and other + methods at beta stability. The intention is to advance methods + to beta stability after some feedback and adoption. To give your + feedback on this API, complete the `Google Analytics Audience + Export API Feedback `__ + form. + + Returns: + Callable[[~.ListAudienceExportsRequest], + Awaitable[~.ListAudienceExportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_audience_exports" not in self._stubs: + self._stubs["list_audience_exports"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1beta.BetaAnalyticsData/ListAudienceExports", + request_serializer=analytics_data_api.ListAudienceExportsRequest.serialize, + response_deserializer=analytics_data_api.ListAudienceExportsResponse.deserialize, + ) + return self._stubs["list_audience_exports"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py index 89b5378a9527..e42ed6342419 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py @@ -20,7 +20,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore @@ -36,6 +42,8 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore + from google.analytics.data_v1beta.types import analytics_data_api from .base import BetaAnalyticsDataTransport @@ -87,6 +95,22 @@ def post_check_compatibility(self, response): logging.log(f"Received response: {response}") return response + def pre_create_audience_export(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_audience_export(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_get_audience_export(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_audience_export(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_metadata(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -95,6 +119,22 @@ def post_get_metadata(self, response): logging.log(f"Received response: {response}") return response + def pre_list_audience_exports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_audience_exports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_audience_export(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_audience_export(self, response): + logging.log(f"Received response: {response}") + return response + def pre_run_pivot_report(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -196,6 +236,54 @@ def post_check_compatibility( """ return response + def pre_create_audience_export( + self, + request: analytics_data_api.CreateAudienceExportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.CreateAudienceExportRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_audience_export + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. 
+ """ + return request, metadata + + def post_create_audience_export( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_audience_export + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_get_audience_export( + self, + request: analytics_data_api.GetAudienceExportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[analytics_data_api.GetAudienceExportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_audience_export + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_get_audience_export( + self, response: analytics_data_api.AudienceExport + ) -> analytics_data_api.AudienceExport: + """Post-rpc interceptor for get_audience_export + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + def pre_get_metadata( self, request: analytics_data_api.GetMetadataRequest, @@ -219,6 +307,56 @@ def post_get_metadata( """ return response + def pre_list_audience_exports( + self, + request: analytics_data_api.ListAudienceExportsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.ListAudienceExportsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_audience_exports + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. 
+ """ + return request, metadata + + def post_list_audience_exports( + self, response: analytics_data_api.ListAudienceExportsResponse + ) -> analytics_data_api.ListAudienceExportsResponse: + """Post-rpc interceptor for list_audience_exports + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + + def pre_query_audience_export( + self, + request: analytics_data_api.QueryAudienceExportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.QueryAudienceExportRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for query_audience_export + + Override in a subclass to manipulate the request or metadata + before they are sent to the BetaAnalyticsData server. + """ + return request, metadata + + def post_query_audience_export( + self, response: analytics_data_api.QueryAudienceExportResponse + ) -> analytics_data_api.QueryAudienceExportResponse: + """Post-rpc interceptor for query_audience_export + + Override in a subclass to manipulate the response + after it is returned by the BetaAnalyticsData server but before + it is returned to user code. + """ + return response + def pre_run_pivot_report( self, request: analytics_data_api.RunPivotReportRequest, @@ -380,11 +518,39 @@ def __init__( self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or BetaAnalyticsDataRestInterceptor() self._prep_wrapped_messages(client_info) + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = {} + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + class _BatchRunPivotReports(BetaAnalyticsDataRestStub): def __hash__(self): return hash("BatchRunPivotReports") @@ -656,6 +822,200 @@ def __call__( resp = self._interceptor.post_check_compatibility(resp) return resp + class _CreateAudienceExport(BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("CreateAudienceExport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.CreateAudienceExportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create audience export method over HTTP. + + Args: + request (~.analytics_data_api.CreateAudienceExportRequest): + The request object. A request to create a new audience + export. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=properties/*}/audienceExports", + "body": "audience_export", + }, + ] + request, metadata = self._interceptor.pre_create_audience_export( + request, metadata + ) + pb_request = analytics_data_api.CreateAudienceExportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_audience_export(resp) + return resp + + class _GetAudienceExport(BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("GetAudienceExport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.GetAudienceExportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.AudienceExport: + r"""Call the get audience export method over HTTP. + + Args: + request (~.analytics_data_api.GetAudienceExportRequest): + The request object. A request to retrieve configuration + metadata about a specific audience + export. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.AudienceExport: + An audience export is a list of users + in an audience at the time of the list's + creation. One audience may have multiple + audience exports created for different + days. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=properties/*/audienceExports/*}", + }, + ] + request, metadata = self._interceptor.pre_get_audience_export( + request, metadata + ) + pb_request = analytics_data_api.GetAudienceExportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.AudienceExport() + pb_resp = analytics_data_api.AudienceExport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_audience_export(resp) + return resp + class _GetMetadata(BetaAnalyticsDataRestStub): def __hash__(self): return hash("GetMetadata") @@ -692,8 +1052,9 @@ def __call__( Returns: ~.analytics_data_api.Metadata: - The dimensions and metrics currently - accepted in reporting methods. + The dimensions, metrics and + comparisons currently accepted in + reporting methods. 
""" @@ -745,6 +1106,197 @@ def __call__( resp = self._interceptor.post_get_metadata(resp) return resp + class _ListAudienceExports(BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("ListAudienceExports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.ListAudienceExportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.ListAudienceExportsResponse: + r"""Call the list audience exports method over HTTP. + + Args: + request (~.analytics_data_api.ListAudienceExportsRequest): + The request object. A request to list all audience + exports for a property. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.ListAudienceExportsResponse: + A list of all audience exports for a + property. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=properties/*}/audienceExports", + }, + ] + request, metadata = self._interceptor.pre_list_audience_exports( + request, metadata + ) + pb_request = analytics_data_api.ListAudienceExportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.ListAudienceExportsResponse() + pb_resp = analytics_data_api.ListAudienceExportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_audience_exports(resp) + return resp + + class _QueryAudienceExport(BetaAnalyticsDataRestStub): + def __hash__(self): + return hash("QueryAudienceExport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.QueryAudienceExportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.QueryAudienceExportResponse: + r"""Call the query audience export method over HTTP. + + Args: + request (~.analytics_data_api.QueryAudienceExportRequest): + The request object. A request to list users in an + audience export. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.QueryAudienceExportResponse: + A list of users in an audience + export. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=properties/*/audienceExports/*}:query", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_query_audience_export( + request, metadata + ) + pb_request = analytics_data_api.QueryAudienceExportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.QueryAudienceExportResponse() + pb_resp = analytics_data_api.QueryAudienceExportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_audience_export(resp) + return resp + class _RunPivotReport(BetaAnalyticsDataRestStub): def __hash__(self): return hash("RunPivotReport") @@ -1042,6 +1594,26 @@ def check_compatibility( # In C++ this would require a dynamic_cast return self._CheckCompatibility(self._session, self._host, self._interceptor) # type: ignore + @property + def create_audience_export( + self, + ) -> Callable[ + [analytics_data_api.CreateAudienceExportRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAudienceExport(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_audience_export( + self, + ) -> Callable[ + [analytics_data_api.GetAudienceExportRequest], analytics_data_api.AudienceExport + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAudienceExport(self._session, self._host, self._interceptor) # type: ignore + @property def get_metadata( self, @@ -1050,6 +1622,28 @@ def get_metadata( # In C++ this would require a dynamic_cast return self._GetMetadata(self._session, self._host, self._interceptor) # type: ignore + @property + def list_audience_exports( + self, + ) -> Callable[ + [analytics_data_api.ListAudienceExportsRequest], + analytics_data_api.ListAudienceExportsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAudienceExports(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_audience_export( + self, + ) -> Callable[ + [analytics_data_api.QueryAudienceExportRequest], + analytics_data_api.QueryAudienceExportResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryAudienceExport(self._session, self._host, self._interceptor) # type: ignore + @property def run_pivot_report( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py index f2eea48d30e7..3a88595a3843 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/__init__.py @@ -14,14 +14,25 @@ # limitations under the License. 
# from .analytics_data_api import ( + AudienceDimension, + AudienceDimensionValue, + AudienceExport, + AudienceExportMetadata, + AudienceRow, BatchRunPivotReportsRequest, BatchRunPivotReportsResponse, BatchRunReportsRequest, BatchRunReportsResponse, CheckCompatibilityRequest, CheckCompatibilityResponse, + CreateAudienceExportRequest, + GetAudienceExportRequest, GetMetadataRequest, + ListAudienceExportsRequest, + ListAudienceExportsResponse, Metadata, + QueryAudienceExportRequest, + QueryAudienceExportResponse, RunPivotReportRequest, RunPivotReportResponse, RunRealtimeReportRequest, @@ -63,17 +74,29 @@ ResponseMetaData, RestrictedMetricType, Row, + SamplingMetadata, ) __all__ = ( + "AudienceDimension", + "AudienceDimensionValue", + "AudienceExport", + "AudienceExportMetadata", + "AudienceRow", "BatchRunPivotReportsRequest", "BatchRunPivotReportsResponse", "BatchRunReportsRequest", "BatchRunReportsResponse", "CheckCompatibilityRequest", "CheckCompatibilityResponse", + "CreateAudienceExportRequest", + "GetAudienceExportRequest", "GetMetadataRequest", + "ListAudienceExportsRequest", + "ListAudienceExportsResponse", "Metadata", + "QueryAudienceExportRequest", + "QueryAudienceExportResponse", "RunPivotReportRequest", "RunPivotReportResponse", "RunRealtimeReportRequest", @@ -109,6 +132,7 @@ "QuotaStatus", "ResponseMetaData", "Row", + "SamplingMetadata", "Compatibility", "MetricAggregation", "MetricType", diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py index 579c0d76e143..d46fc85fa256 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/analytics_data_api.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from 
google.analytics.data_v1beta.types import data @@ -38,6 +39,17 @@ "GetMetadataRequest", "RunRealtimeReportRequest", "RunRealtimeReportResponse", + "GetAudienceExportRequest", + "ListAudienceExportsRequest", + "ListAudienceExportsResponse", + "CreateAudienceExportRequest", + "AudienceExport", + "AudienceExportMetadata", + "QueryAudienceExportRequest", + "QueryAudienceExportResponse", + "AudienceRow", + "AudienceDimension", + "AudienceDimensionValue", }, ) @@ -135,8 +147,8 @@ class CheckCompatibilityResponse(proto.Message): class Metadata(proto.Message): - r"""The dimensions and metrics currently accepted in reporting - methods. + r"""The dimensions, metrics and comparisons currently accepted in + reporting methods. Attributes: name (str): @@ -991,4 +1003,409 @@ class RunRealtimeReportResponse(proto.Message): ) +class GetAudienceExportRequest(proto.Message): + r"""A request to retrieve configuration metadata about a specific + audience export. + + Attributes: + name (str): + Required. The audience export resource name. Format: + ``properties/{property}/audienceExports/{audience_export}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAudienceExportsRequest(proto.Message): + r"""A request to list all audience exports for a property. + + Attributes: + parent (str): + Required. All audience exports for this property will be + listed in the response. Format: ``properties/{property}`` + page_size (int): + Optional. The maximum number of audience + exports to return. The service may return fewer + than this value. If unspecified, at most 200 + audience exports will be returned. The maximum + value is 1000 (higher values will be coerced to + the maximum). + page_token (str): + Optional. A page token, received from a previous + ``ListAudienceExports`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAudienceExports`` must match the call that provided + the page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAudienceExportsResponse(proto.Message): + r"""A list of all audience exports for a property. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_exports (MutableSequence[google.analytics.data_v1beta.types.AudienceExport]): + Each audience export for a property. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + audience_exports: MutableSequence["AudienceExport"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AudienceExport", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class CreateAudienceExportRequest(proto.Message): + r"""A request to create a new audience export. + + Attributes: + parent (str): + Required. The parent resource where this audience export + will be created. Format: ``properties/{property}`` + audience_export (google.analytics.data_v1beta.types.AudienceExport): + Required. The audience export to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + audience_export: "AudienceExport" = proto.Field( + proto.MESSAGE, + number=2, + message="AudienceExport", + ) + + +class AudienceExport(proto.Message): + r"""An audience export is a list of users in an audience at the + time of the list's creation. One audience may have multiple + audience exports created for different days. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. 
The audience export resource name + assigned during creation. This resource name identifies this + ``AudienceExport``. + + Format: + ``properties/{property}/audienceExports/{audience_export}`` + audience (str): + Required. The audience resource name. This resource name + identifies the audience being listed and is shared between + the Analytics Data & Admin APIs. + + Format: ``properties/{property}/audiences/{audience}`` + audience_display_name (str): + Output only. The descriptive display name for + this audience. For example, "Purchasers". + dimensions (MutableSequence[google.analytics.data_v1beta.types.AudienceDimension]): + Required. The dimensions requested and + displayed in the query response. + state (google.analytics.data_v1beta.types.AudienceExport.State): + Output only. The current state for this + AudienceExport. + + This field is a member of `oneof`_ ``_state``. + begin_creating_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when CreateAudienceExport was called + and the AudienceExport began the ``CREATING`` state. + + This field is a member of `oneof`_ ``_begin_creating_time``. + creation_quota_tokens_charged (int): + Output only. The total quota tokens charged during creation + of the AudienceExport. Because this token count is based on + activity from the ``CREATING`` state, this tokens charged + will be fixed once an AudienceExport enters the ``ACTIVE`` + or ``FAILED`` states. + row_count (int): + Output only. The total number of rows in the + AudienceExport result. + + This field is a member of `oneof`_ ``_row_count``. + error_message (str): + Output only. Error message is populated when + an audience export fails during creation. A + common reason for such a failure is quota + exhaustion. + + This field is a member of `oneof`_ ``_error_message``. + percentage_completed (float): + Output only. The percentage completed for + this audience export ranging between 0 to 100. 
+ + This field is a member of `oneof`_ ``_percentage_completed``. + """ + + class State(proto.Enum): + r"""The AudienceExport currently exists in this state. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state will never be used. + CREATING (1): + The AudienceExport is currently creating and + will be available in the future. Creating occurs + immediately after the CreateAudienceExport call. + ACTIVE (2): + The AudienceExport is fully created and ready + for querying. An AudienceExport is updated to + active asynchronously from a request; this + occurs some time (for example 15 minutes) after + the initial create call. + FAILED (3): + The AudienceExport failed to be created. It + is possible that re-requesting this audience + export will succeed. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + FAILED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + audience_display_name: str = proto.Field( + proto.STRING, + number=3, + ) + dimensions: MutableSequence["AudienceDimension"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AudienceDimension", + ) + state: State = proto.Field( + proto.ENUM, + number=5, + optional=True, + enum=State, + ) + begin_creating_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + creation_quota_tokens_charged: int = proto.Field( + proto.INT32, + number=7, + ) + row_count: int = proto.Field( + proto.INT32, + number=8, + optional=True, + ) + error_message: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + percentage_completed: float = proto.Field( + proto.DOUBLE, + number=10, + optional=True, + ) + + +class AudienceExportMetadata(proto.Message): + r"""This metadata is currently blank.""" + + +class QueryAudienceExportRequest(proto.Message): + r"""A request to list users in an audience export. + + Attributes: + name (str): + Required. 
The name of the audience export to retrieve users + from. Format: + ``properties/{property}/audienceExports/{audience_export}`` + offset (int): + Optional. The row count of the start row. The first row is + counted as row 0. + + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + Optional. The number of rows to return. If unspecified, + 10,000 rows are returned. The API returns a maximum of + 250,000 rows per request, no matter how many you ask for. + ``limit`` must be positive. + + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. + + To learn more about this pagination parameter, see + `Pagination `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + offset: int = proto.Field( + proto.INT64, + number=2, + ) + limit: int = proto.Field( + proto.INT64, + number=3, + ) + + +class QueryAudienceExportResponse(proto.Message): + r"""A list of users in an audience export. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_export (google.analytics.data_v1beta.types.AudienceExport): + Configuration data about AudienceExport being + queried. Returned to help interpret the audience + rows in this response. For example, the + dimensions in this AudienceExport correspond to + the columns in the AudienceRows. + + This field is a member of `oneof`_ ``_audience_export``. + audience_rows (MutableSequence[google.analytics.data_v1beta.types.AudienceRow]): + Rows for each user in an audience export. The + number of rows in this response will be less + than or equal to request's page size. 
+ row_count (int): + The total number of rows in the AudienceExport result. + ``rowCount`` is independent of the number of rows returned + in the response, the ``limit`` request parameter, and the + ``offset`` request parameter. For example if a query returns + 175 rows and includes ``limit`` of 50 in the API request, + the response will contain ``rowCount`` of 175 but only 50 + rows. + + To learn more about this pagination parameter, see + `Pagination `__. + + This field is a member of `oneof`_ ``_row_count``. + """ + + audience_export: "AudienceExport" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="AudienceExport", + ) + audience_rows: MutableSequence["AudienceRow"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AudienceRow", + ) + row_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + + +class AudienceRow(proto.Message): + r"""Dimension value attributes for the audience user row. + + Attributes: + dimension_values (MutableSequence[google.analytics.data_v1beta.types.AudienceDimensionValue]): + Each dimension value attribute for an + audience user. One dimension value will be added + for each dimension column requested. + """ + + dimension_values: MutableSequence["AudienceDimensionValue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AudienceDimensionValue", + ) + + +class AudienceDimension(proto.Message): + r"""An audience dimension is a user attribute. Specific user attributed + are requested and then later returned in the + ``QueryAudienceExportResponse``. + + Attributes: + dimension_name (str): + Optional. The API name of the dimension. See the `API + Dimensions `__ + for the list of dimension names. + """ + + dimension_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AudienceDimensionValue(proto.Message): + r"""The value of a dimension. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + Value as a string if the dimension type is a + string. + + This field is a member of `oneof`_ ``one_value``. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + oneof="one_value", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py b/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py index 121582aa9dea..037fed97904e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/types/data.py @@ -41,6 +41,7 @@ "CohortsRange", "CohortReportSettings", "ResponseMetaData", + "SamplingMetadata", "DimensionHeader", "MetricHeader", "PivotHeader", @@ -284,7 +285,15 @@ class Dimension(proto.Message): name (str): The name of the dimension. See the `API Dimensions `__ - for the list of dimension names. + for the list of dimension names supported by core reporting + methods such as ``runReport`` and ``batchRunReports``. See + `Realtime + Dimensions `__ + for the list of dimension names supported by the + ``runRealtimeReport`` method. See `Funnel + Dimensions `__ + for the list of dimension names supported by the + ``runFunnelReport`` method. If ``dimensionExpression`` is specified, ``name`` can be any string that you would like within the allowed character set. @@ -418,7 +427,15 @@ class Metric(proto.Message): name (str): The name of the metric. See the `API Metrics `__ - for the list of metric names. + for the list of metric names supported by core reporting + methods such as ``runReport`` and ``batchRunReports``. See + `Realtime + Metrics `__ + for the list of metric names supported by the + ``runRealtimeReport`` method. See `Funnel + Metrics `__ + for the list of metric names supported by the + ``runFunnelReport`` method. 
If ``expression`` is specified, ``name`` can be any string that you would like within the allowed character set. For @@ -1255,11 +1272,19 @@ class ResponseMetaData(proto.Message): subject to thresholding thresholding and no data is absent from the report, and this happens when all data is above the thresholds. To learn more, see `Data - thresholds `__ - and `About Demographics and - Interests `__. + thresholds `__. This field is a member of `oneof`_ ``_subject_to_thresholding``. + sampling_metadatas (MutableSequence[google.analytics.data_v1beta.types.SamplingMetadata]): + If this report results is + `sampled `__, + this describes the percentage of events used in this report. + One ``samplingMetadatas`` is populated for each date range. + Each ``samplingMetadatas`` corresponds to a date range in + order that date ranges were specified in the request. + + However if the results are not sampled, this field will not + be defined. """ class SchemaRestrictionResponse(proto.Message): @@ -1341,6 +1366,46 @@ class ActiveMetricRestriction(proto.Message): number=8, optional=True, ) + sampling_metadatas: MutableSequence["SamplingMetadata"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="SamplingMetadata", + ) + + +class SamplingMetadata(proto.Message): + r"""If this report results is + `sampled `__, + this describes the percentage of events used in this report. + Sampling is the practice of analyzing a subset of all data in order + to uncover the meaningful information in the larger data set. + + Attributes: + samples_read_count (int): + The total number of events read in this + sampled report for a date range. This is the + size of the subset this property's data that was + analyzed in this report. + sampling_space_size (int): + The total number of events present in this property's data + that could have been analyzed in this report for a date + range. 
Sampling uncovers the meaningful information about + the larger data set, and this is the size of the larger data + set. + + To calculate the percentage of available data that was used + in this report, compute + ``samplesReadCount/samplingSpaceSize``. + """ + + samples_read_count: int = proto.Field( + proto.INT64, + number=1, + ) + sampling_space_size: int = proto.Field( + proto.INT64, + number=2, + ) class DimensionHeader(proto.Message): @@ -1640,20 +1705,28 @@ class PropertyQuota(proto.Message): class QuotaStatus(proto.Message): r"""Current state for a particular quota group. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: consumed (int): Quota consumed by this request. + + This field is a member of `oneof`_ ``_consumed``. remaining (int): Quota remaining after this request. + + This field is a member of `oneof`_ ``_remaining``. """ consumed: int = proto.Field( proto.INT32, number=1, + optional=True, ) remaining: int = proto.Field( proto.INT32, number=2, + optional=True, ) diff --git a/packages/google-analytics-data/noxfile.py b/packages/google-analytics-data/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-analytics-data/noxfile.py +++ b/packages/google-analytics-data/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py new file mode 100644 index 000000000000..ddbdd1fb56ad --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py new file mode 100644 index 000000000000..a96fea76d9ae --- /dev/null +++ 
b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_create_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + audience_export = data_v1beta.AudienceExport() + audience_export.audience = "audience_value" + + request = data_v1beta.CreateAudienceExportRequest( + parent="parent_value", + audience_export=audience_export, + ) + + # Make the request + operation = client.create_audience_export(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py new file mode 100644 index 000000000000..e1bedcc23940 --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.get_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py new file mode 100644 index 000000000000..071d06c88baa --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_get_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.GetAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.get_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py new file mode 100644 index 000000000000..81df8332ac2e --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAudienceExports +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py new file mode 100644 index 000000000000..aef6ea5af3a9 --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAudienceExports +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_list_audience_exports(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.ListAudienceExportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_audience_exports(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py new file mode 100644 index 000000000000..d33405429694 --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +async def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = await client.query_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py new file mode 100644 index 000000000000..cafc44c01402 --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAudienceExport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1beta + + +def sample_query_audience_export(): + # Create a client + client = data_v1beta.BetaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1beta.QueryAudienceExportRequest( + name="name_value", + ) + + # Make the request + response = client.query_audience_export(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 218723a01eaa..f0403c4322cb 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.1" + "version": "0.18.3" }, "snippets": [ { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 306aa479d695..aa46dc45d3a4 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.1" + "version": "0.18.3" }, "snippets": [ { @@ -470,6 +470,336 @@ ], "title": 
"analyticsdata_v1beta_generated_beta_analytics_data_check_compatibility_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.create_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.CreateAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "CreateAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.CreateAudienceExportRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "audience_export", + "type": "google.analytics.data_v1beta.types.AudienceExport" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_audience_export" + }, + "description": "Sample for CreateAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.create_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.CreateAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "CreateAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.CreateAudienceExportRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "audience_export", + "type": "google.analytics.data_v1beta.types.AudienceExport" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_audience_export" + }, + "description": "Sample for CreateAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_CreateAudienceExport_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_create_audience_export_sync.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.get_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.GetAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.AudienceExport", + "shortName": "get_audience_export" + }, + "description": "Sample for GetAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": 
"google.analytics.data_v1beta.BetaAnalyticsDataClient.get_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.GetAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.AudienceExport", + "shortName": "get_audience_export" + }, + "description": "Sample for GetAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetAudienceExport_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_audience_export_sync.py" + }, { "canonical": true, "clientMethod": { @@ -513,10 +843,90 @@ "shortName": "get_metadata" }, "description": "Sample for GetMetadata", - "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.get_metadata", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetMetadata", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "GetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.GetMetadataRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.Metadata", + "shortName": "get_metadata" + }, + "description": "Sample for GetMetadata", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_async", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_sync", "segments": [ { "end": 51, @@ -549,7 +959,88 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_async.py" + "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.list_audience_exports", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.ListAudienceExports", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "ListAudienceExports" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.ListAudienceExportsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsAsyncPager", + "shortName": "list_audience_exports" + }, + "description": "Sample for ListAudienceExports", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_async.py" }, { "canonical": true, @@ -558,19 +1049,100 @@ "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient", "shortName": "BetaAnalyticsDataClient" }, - "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.get_metadata", + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.list_audience_exports", "method": { - "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.GetMetadata", + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.ListAudienceExports", "service": { "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", "shortName": "BetaAnalyticsData" }, - "shortName": "GetMetadata" + "shortName": "ListAudienceExports" }, "parameters": [ { "name": "request", - "type": "google.analytics.data_v1beta.types.GetMetadataRequest" + "type": "google.analytics.data_v1beta.types.ListAudienceExportsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.services.beta_analytics_data.pagers.ListAudienceExportsPager", + "shortName": "list_audience_exports" + }, + "description": "Sample for ListAudienceExports", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_ListAudienceExports_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_list_audience_exports_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient", + "shortName": "BetaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataAsyncClient.query_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.QueryAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "QueryAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.QueryAudienceExportRequest" }, { "name": "name", @@ -589,14 +1161,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.analytics.data_v1beta.types.Metadata", - "shortName": "get_metadata" + "resultType": "google.analytics.data_v1beta.types.QueryAudienceExportResponse", + "shortName": "query_audience_export" }, - "description": "Sample for GetMetadata", - "file": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py", + "description": "Sample for QueryAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_GetMetadata_sync", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_async", "segments": [ { "end": 51, @@ -629,7 +1201,87 @@ "type": "RESPONSE_HANDLING" } ], - "title": "analyticsdata_v1beta_generated_beta_analytics_data_get_metadata_sync.py" + "title": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.analytics.data_v1beta.BetaAnalyticsDataClient", + "shortName": "BetaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1beta.BetaAnalyticsDataClient.query_audience_export", + "method": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData.QueryAudienceExport", + "service": { + "fullName": "google.analytics.data.v1beta.BetaAnalyticsData", + "shortName": "BetaAnalyticsData" + }, + "shortName": "QueryAudienceExport" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1beta.types.QueryAudienceExportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1beta.types.QueryAudienceExportResponse", + "shortName": "query_audience_export" + }, + "description": "Sample for QueryAudienceExport", + "file": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1beta_generated_BetaAnalyticsData_QueryAudienceExport_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1beta_generated_beta_analytics_data_query_audience_export_sync.py" }, { "canonical": true, diff --git a/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py b/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py index 7057fd59e2eb..43dd6a4d91ab 100644 --- 
a/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py +++ b/packages/google-analytics-data/scripts/fixup_data_v1beta_keywords.py @@ -42,7 +42,11 @@ class dataCallTransformer(cst.CSTTransformer): 'batch_run_pivot_reports': ('property', 'requests', ), 'batch_run_reports': ('property', 'requests', ), 'check_compatibility': ('property', 'dimensions', 'metrics', 'dimension_filter', 'metric_filter', 'compatibility_filter', ), + 'create_audience_export': ('parent', 'audience_export', ), + 'get_audience_export': ('name', ), 'get_metadata': ('name', ), + 'list_audience_exports': ('parent', 'page_size', 'page_token', ), + 'query_audience_export': ('name', 'offset', 'limit', ), 'run_pivot_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'pivots', 'dimension_filter', 'metric_filter', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', ), 'run_realtime_report': ('property', 'dimensions', 'metrics', 'dimension_filter', 'metric_filter', 'limit', 'metric_aggregations', 'order_bys', 'return_property_quota', 'minute_ranges', ), 'run_report': ('property', 'dimensions', 'metrics', 'date_ranges', 'dimension_filter', 'metric_filter', 'offset', 'limit', 'metric_aggregations', 'order_bys', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', ), diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index d67e5809537e..38513bef9d3a 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -3535,6 +3535,10 @@ def test_create_audience_list_rest(request_type): "error_message": "error_message_value", "percentage_completed": 0.2106, "recurring_audience_list": "recurring_audience_list_value", + "webhook_notification": { + "uri": "uri_value", 
+ "channel_token": "channel_token_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5074,6 +5078,10 @@ def test_create_recurring_audience_list_rest(request_type): "dimensions": [{"dimension_name": "dimension_name_value"}], "active_days_remaining": 2213, "audience_lists": ["audience_lists_value1", "audience_lists_value2"], + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 122acd0ea983..65d5f5b6e3f9 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -26,14 +26,25 @@ import json import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from 
proto.marshal.rules import wrappers @@ -45,6 +56,7 @@ from google.analytics.data_v1beta.services.beta_analytics_data import ( BetaAnalyticsDataAsyncClient, BetaAnalyticsDataClient, + pagers, transports, ) from google.analytics.data_v1beta.types import analytics_data_api, data @@ -1916,241 +1928,2115 @@ async def test_check_compatibility_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.RunReportRequest, + analytics_data_api.CreateAudienceExportRequest, dict, ], ) -def test_run_report_rest(request_type): +def test_create_audience_export(request_type, transport: str = "grpc"): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RunReportResponse( - row_count=992, - kind="kind_value", - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.RunReportResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_audience_export(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.run_report(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateAudienceExportRequest() # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.RunReportResponse) - assert response.row_count == 992 - assert response.kind == "kind_value" + assert isinstance(response, future.Future) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_report_rest_interceptors(null_interceptor): - transport = transports.BetaAnalyticsDataRestTransport( +def test_create_audience_export_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.BetaAnalyticsDataRestInterceptor(), + transport="grpc", ) - client = BetaAnalyticsDataClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_run_report" - ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_run_report" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_data_api.RunReportRequest.pb( - analytics_data_api.RunReportRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RunReportResponse.to_json( - analytics_data_api.RunReportResponse() - ) - - request = analytics_data_api.RunReportRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = analytics_data_api.RunReportResponse() - - client.run_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + client.create_audience_export() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateAudienceExportRequest() -def test_run_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunReportRequest +@pytest.mark.asyncio +async def test_create_audience_export_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.CreateAudienceExportRequest, ): - client = BetaAnalyticsDataClient( + client = BetaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_report(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_audience_export(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateAudienceExportRequest() + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_run_report_rest_error(): - client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +@pytest.mark.asyncio +async def test_create_audience_export_async_from_dict(): + await test_create_audience_export_async(request_type=dict) -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.RunPivotReportRequest, - dict, - ], -) -def test_run_pivot_report_rest(request_type): + +def test_create_audience_export_field_headers(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateAudienceExportRequest() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RunPivotReportResponse( - kind="kind_value", - ) + request.parent = "parent_value" - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.RunPivotReportResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_audience_export(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.run_pivot_report(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.RunPivotReportResponse) - assert response.kind == "kind_value" + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_pivot_report_rest_interceptors(null_interceptor): - transport = transports.BetaAnalyticsDataRestTransport( +@pytest.mark.asyncio +async def test_create_audience_export_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.BetaAnalyticsDataRestInterceptor(), ) - client = BetaAnalyticsDataClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_run_pivot_report" - ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_run_pivot_report" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = analytics_data_api.RunPivotReportRequest.pb( - analytics_data_api.RunPivotReportRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RunPivotReportResponse.to_json( - analytics_data_api.RunPivotReportResponse() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateAudienceExportRequest() - request = analytics_data_api.RunPivotReportRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = analytics_data_api.RunPivotReportResponse() + request.parent = "parent_value" - client.run_pivot_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.create_audience_export(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_run_pivot_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunPivotReportRequest -): + +def test_create_audience_export_flattened(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_audience_export( + parent="parent_value", + audience_export=analytics_data_api.AudienceExport(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].audience_export + mock_val = analytics_data_api.AudienceExport(name="name_value") + assert arg == mock_val + + +def test_create_audience_export_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_audience_export( + analytics_data_api.CreateAudienceExportRequest(), + parent="parent_value", + audience_export=analytics_data_api.AudienceExport(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_audience_export_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_audience_export( + parent="parent_value", + audience_export=analytics_data_api.AudienceExport(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].audience_export + mock_val = analytics_data_api.AudienceExport(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_audience_export_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_audience_export( + analytics_data_api.CreateAudienceExportRequest(), + parent="parent_value", + audience_export=analytics_data_api.AudienceExport(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.QueryAudienceExportRequest, + dict, + ], +) +def test_query_audience_export(request_type, transport: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceExportResponse( + row_count=992, + ) + response = client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.QueryAudienceExportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryAudienceExportResponse) + assert response.row_count == 992 + + +def test_query_audience_export_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + client.query_audience_export() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.QueryAudienceExportRequest() + + +@pytest.mark.asyncio +async def test_query_audience_export_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.QueryAudienceExportRequest, +): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.QueryAudienceExportResponse( + row_count=992, + ) + ) + response = await client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.QueryAudienceExportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.QueryAudienceExportResponse) + assert response.row_count == 992 + + +@pytest.mark.asyncio +async def test_query_audience_export_async_from_dict(): + await test_query_audience_export_async(request_type=dict) + + +def test_query_audience_export_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.QueryAudienceExportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + call.return_value = analytics_data_api.QueryAudienceExportResponse() + client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_audience_export_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.QueryAudienceExportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.QueryAudienceExportResponse() + ) + await client.query_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_query_audience_export_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceExportResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.query_audience_export( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_query_audience_export_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.query_audience_export( + analytics_data_api.QueryAudienceExportRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_query_audience_export_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.QueryAudienceExportResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.QueryAudienceExportResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.query_audience_export( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_query_audience_export_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.query_audience_export( + analytics_data_api.QueryAudienceExportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetAudienceExportRequest, + dict, + ], +) +def test_get_audience_export(request_type, transport: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceExport( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + ) + response = client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetAudienceExportRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.AudienceExport) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceExport.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + + +def test_get_audience_export_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + client.get_audience_export() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetAudienceExportRequest() + + +@pytest.mark.asyncio +async def test_get_audience_export_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.GetAudienceExportRequest, +): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.AudienceExport( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, + row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + ) + ) + response = await client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.GetAudienceExportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.AudienceExport) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceExport.State.CREATING + assert response.creation_quota_tokens_charged == 3070 + assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_get_audience_export_async_from_dict(): + await test_get_audience_export_async(request_type=dict) + + +def test_get_audience_export_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetAudienceExportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + call.return_value = analytics_data_api.AudienceExport() + client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_audience_export_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.GetAudienceExportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.AudienceExport() + ) + await client.get_audience_export(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_audience_export_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.AudienceExport() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_audience_export( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_audience_export_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_audience_export( + analytics_data_api.GetAudienceExportRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_audience_export_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_audience_export), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.AudienceExport() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.AudienceExport() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_audience_export( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_audience_export_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_audience_export( + analytics_data_api.GetAudienceExportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.ListAudienceExportsRequest, + dict, + ], +) +def test_list_audience_exports(request_type, transport: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceExportsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListAudienceExportsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAudienceExportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_audience_exports_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + client.list_audience_exports() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListAudienceExportsRequest() + + +@pytest.mark.asyncio +async def test_list_audience_exports_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.ListAudienceExportsRequest, +): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListAudienceExportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListAudienceExportsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAudienceExportsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_audience_exports_async_from_dict(): + await test_list_audience_exports_async(request_type=dict) + + +def test_list_audience_exports_field_headers(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListAudienceExportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + call.return_value = analytics_data_api.ListAudienceExportsResponse() + client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_audience_exports_field_headers_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListAudienceExportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListAudienceExportsResponse() + ) + await client.list_audience_exports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_audience_exports_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceExportsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_audience_exports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_audience_exports_flattened_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_audience_exports( + analytics_data_api.ListAudienceExportsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_audience_exports_flattened_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceExportsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListAudienceExportsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_audience_exports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_audience_exports_flattened_error_async(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_audience_exports( + analytics_data_api.ListAudienceExportsRequest(), + parent="parent_value", + ) + + +def test_list_audience_exports_pager(transport_name: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_audience_exports(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceExport) for i in results) + + +def test_list_audience_exports_pages(transport_name: str = "grpc"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + pages = list(client.list_audience_exports(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_audience_exports_async_pager(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_audience_exports( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.AudienceExport) for i in responses) + + +@pytest.mark.asyncio +async def test_list_audience_exports_async_pages(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_exports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_audience_exports(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunReportRequest, + dict, + ], +) +def test_run_report_rest(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.RunReportResponse( + row_count=992, + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunReportResponse) + assert response.row_count == 992 + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_report_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_run_report" + ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "pre_run_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunReportRequest.pb( + analytics_data_api.RunReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.RunReportResponse.to_json( + analytics_data_api.RunReportResponse() + ) + + request = 
analytics_data_api.RunReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunReportResponse() + + client.run_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_report_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.RunReportRequest +): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_report(request) + + +def test_run_report_rest_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunPivotReportRequest, + dict, + ], +) +def test_run_pivot_report_rest(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.RunPivotReportResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunPivotReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_pivot_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunPivotReportResponse) + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_pivot_report_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_run_pivot_report" + ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "pre_run_pivot_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunPivotReportRequest.pb( + analytics_data_api.RunPivotReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.RunPivotReportResponse.to_json( + analytics_data_api.RunPivotReportResponse() + ) + + request = 
analytics_data_api.RunPivotReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunPivotReportResponse() + + client.run_pivot_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_pivot_report_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.RunPivotReportRequest +): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_pivot_report(request) + + +def test_run_pivot_report_rest_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.BatchRunReportsRequest, + dict, + ], +) +def test_batch_run_reports_rest(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.BatchRunReportsResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.BatchRunReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_run_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunReportsResponse) + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_run_reports_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_reports" + ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_reports" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.BatchRunReportsRequest.pb( + analytics_data_api.BatchRunReportsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.BatchRunReportsResponse.to_json( + analytics_data_api.BatchRunReportsResponse() + ) + + 
request = analytics_data_api.BatchRunReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.BatchRunReportsResponse() + + client.batch_run_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_run_reports_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.BatchRunReportsRequest +): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_run_reports(request) + + +def test_batch_run_reports_rest_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.BatchRunPivotReportsRequest, + dict, + ], +) +def test_batch_run_pivot_reports_rest(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.BatchRunPivotReportsResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.BatchRunPivotReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_run_pivot_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse) + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_pivot_reports" + ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_pivot_reports" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.BatchRunPivotReportsRequest.pb( + analytics_data_api.BatchRunPivotReportsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_data_api.BatchRunPivotReportsResponse.to_json( + 
analytics_data_api.BatchRunPivotReportsResponse() + ) + ) + + request = analytics_data_api.BatchRunPivotReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.BatchRunPivotReportsResponse() + + client.batch_run_pivot_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_run_pivot_reports_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.BatchRunPivotReportsRequest +): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_run_pivot_reports(request) + + +def test_batch_run_pivot_reports_rest_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetMetadataRequest, + dict, + ], +) +def test_get_metadata_rest(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/metadata"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.Metadata( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_metadata(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == "name_value" + + +def test_get_metadata_rest_required_fields( + request_type=analytics_data_api.GetMetadataRequest, +): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BetaAnalyticsDataClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.Metadata() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_metadata(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_metadata_rest_unset_required_fields(): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_metadata._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_metadata_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_get_metadata" + ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "pre_get_metadata" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.GetMetadataRequest.pb( + analytics_data_api.GetMetadataRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.Metadata.to_json( + analytics_data_api.Metadata() + ) + + request = analytics_data_api.GetMetadataRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.Metadata() + + client.get_metadata( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_metadata_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.GetMetadataRequest +): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/metadata"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_metadata(request) + + +def test_get_metadata_rest_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.Metadata() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/metadata"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=properties/*/metadata}" % client.transport._host, args[1] + ) + + +def test_get_metadata_rest_flattened_error(transport: str = "rest"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name="name_value", + ) + + +def test_get_metadata_rest_error(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunRealtimeReportRequest, + dict, + ], +) +def test_run_realtime_report_rest(request_type): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunRealtimeReportResponse( + row_count=992, + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunRealtimeReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_realtime_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, analytics_data_api.RunRealtimeReportResponse) + assert response.row_count == 992 + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_realtime_report_rest_interceptors(null_interceptor): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BetaAnalyticsDataRestInterceptor(), + ) + client = BetaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_run_realtime_report" + ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "pre_run_realtime_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunRealtimeReportRequest.pb( + analytics_data_api.RunRealtimeReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + analytics_data_api.RunRealtimeReportResponse.to_json( + analytics_data_api.RunRealtimeReportResponse() + ) + ) + + request = analytics_data_api.RunRealtimeReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunRealtimeReportResponse() + + client.run_realtime_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_realtime_report_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.RunRealtimeReportRequest +): 
+ client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, "request") as req, pytest.raises( core_exceptions.BadRequest @@ -2160,10 +4046,10 @@ def test_run_pivot_report_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.run_pivot_report(request) + client.run_realtime_report(request) -def test_run_pivot_report_rest_error(): +def test_run_realtime_report_rest_error(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2172,11 +4058,11 @@ def test_run_pivot_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.BatchRunReportsRequest, + analytics_data_api.CheckCompatibilityRequest, dict, ], ) -def test_batch_run_reports_rest(request_type): +def test_check_compatibility_rest(request_type): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2189,28 +4075,25 @@ def test_batch_run_reports_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.BatchRunReportsResponse( - kind="kind_value", - ) + return_value = analytics_data_api.CheckCompatibilityResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.BatchRunReportsResponse.pb(return_value) + return_value = analytics_data_api.CheckCompatibilityResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_run_reports(request) + response = client.check_compatibility(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.BatchRunReportsResponse) - assert response.kind == "kind_value" + assert isinstance(response, analytics_data_api.CheckCompatibilityResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_run_reports_rest_interceptors(null_interceptor): +def test_check_compatibility_rest_interceptors(null_interceptor): transport = transports.BetaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2223,14 +4106,14 @@ def test_batch_run_reports_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_reports" + transports.BetaAnalyticsDataRestInterceptor, "post_check_compatibility" ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_reports" + transports.BetaAnalyticsDataRestInterceptor, "pre_check_compatibility" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.BatchRunReportsRequest.pb( - analytics_data_api.BatchRunReportsRequest() + pb_message = analytics_data_api.CheckCompatibilityRequest.pb( + 
analytics_data_api.CheckCompatibilityRequest() ) transcode.return_value = { "method": "post", @@ -2242,19 +4125,21 @@ def test_batch_run_reports_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.BatchRunReportsResponse.to_json( - analytics_data_api.BatchRunReportsResponse() + req.return_value._content = ( + analytics_data_api.CheckCompatibilityResponse.to_json( + analytics_data_api.CheckCompatibilityResponse() + ) ) - request = analytics_data_api.BatchRunReportsRequest() + request = analytics_data_api.CheckCompatibilityRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.BatchRunReportsResponse() + post.return_value = analytics_data_api.CheckCompatibilityResponse() - client.batch_run_reports( + client.check_compatibility( request, metadata=[ ("key", "val"), @@ -2266,8 +4151,8 @@ def test_batch_run_reports_rest_interceptors(null_interceptor): post.assert_called_once() -def test_batch_run_reports_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.BatchRunReportsRequest +def test_check_compatibility_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.CheckCompatibilityRequest ): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2287,10 +4172,10 @@ def test_batch_run_reports_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_run_reports(request) + client.check_compatibility(request) -def test_batch_run_reports_rest_error(): +def test_check_compatibility_rest_error(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2299,45 +4184,214 @@ def test_batch_run_reports_rest_error(): @pytest.mark.parametrize( 
"request_type", [ - analytics_data_api.BatchRunPivotReportsRequest, + analytics_data_api.CreateAudienceExportRequest, dict, ], ) -def test_batch_run_pivot_reports_rest(request_type): +def test_create_audience_export_rest(request_type): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"parent": "properties/sample1"} + request_init["audience_export"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "state": 1, + "begin_creating_time": {"seconds": 751, "nanos": 543}, + "creation_quota_tokens_charged": 3070, + "row_count": 992, + "error_message": "error_message_value", + "percentage_completed": 0.2106, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceExportRequest.meta.fields[ + "audience_export" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_export"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_export"][field])): + del request_init["audience_export"][field][i][subfield] + else: 
+ del request_init["audience_export"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.BatchRunPivotReportsResponse( - kind="kind_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.BatchRunPivotReportsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_run_pivot_reports(request) + response = client.create_audience_export(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse) - assert response.kind == "kind_value" + assert response.operation.name == "operations/spam" + + +def test_create_audience_export_rest_required_fields( + request_type=analytics_data_api.CreateAudienceExportRequest, +): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_audience_export(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_audience_export_rest_unset_required_fields(): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_audience_export._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "audienceExport", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): +def test_create_audience_export_rest_interceptors(null_interceptor): transport = transports.BetaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2350,14 +4404,16 @@ def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_pivot_reports" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_create_audience_export" ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_pivot_reports" + transports.BetaAnalyticsDataRestInterceptor, "pre_create_audience_export" ) as pre: pre.assert_not_called() 
post.assert_not_called() - pb_message = analytics_data_api.BatchRunPivotReportsRequest.pb( - analytics_data_api.BatchRunPivotReportsRequest() + pb_message = analytics_data_api.CreateAudienceExportRequest.pb( + analytics_data_api.CreateAudienceExportRequest() ) transcode.return_value = { "method": "post", @@ -2367,23 +4423,21 @@ def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): } req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.BatchRunPivotReportsResponse.to_json( - analytics_data_api.BatchRunPivotReportsResponse() - ) + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = analytics_data_api.BatchRunPivotReportsRequest() + request = analytics_data_api.CreateAudienceExportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.BatchRunPivotReportsResponse() + post.return_value = operations_pb2.Operation() - client.batch_run_pivot_reports( + client.create_audience_export( request, metadata=[ ("key", "val"), @@ -2395,8 +4449,8 @@ def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): post.assert_called_once() -def test_batch_run_pivot_reports_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.BatchRunPivotReportsRequest +def test_create_audience_export_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.CreateAudienceExportRequest ): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2404,7 +4458,7 @@ def test_batch_run_pivot_reports_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"parent": "properties/sample1"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2416,10 +4470,66 @@ def test_batch_run_pivot_reports_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_run_pivot_reports(request) + client.create_audience_export(request) -def test_batch_run_pivot_reports_rest_error(): +def test_create_audience_export_rest_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + audience_export=analytics_data_api.AudienceExport(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_audience_export(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=properties/*}/audienceExports" % client.transport._host, + args[1], + ) + + +def test_create_audience_export_rest_flattened_error(transport: str = "rest"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_audience_export( + analytics_data_api.CreateAudienceExportRequest(), + parent="parent_value", + audience_export=analytics_data_api.AudienceExport(name="name_value"), + ) + + +def test_create_audience_export_rest_error(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2428,45 +4538,45 @@ def test_batch_run_pivot_reports_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetMetadataRequest, + analytics_data_api.QueryAudienceExportRequest, dict, ], ) -def test_get_metadata_rest(request_type): +def test_query_audience_export_rest(request_type): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/metadata"} + request_init = {"name": "properties/sample1/audienceExports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.Metadata( - name="name_value", + return_value = analytics_data_api.QueryAudienceExportResponse( + row_count=992, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.Metadata.pb(return_value) + return_value = analytics_data_api.QueryAudienceExportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_metadata(request) + response = client.query_audience_export(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.Metadata) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryAudienceExportResponse) + assert response.row_count == 992 -def test_get_metadata_rest_required_fields( - request_type=analytics_data_api.GetMetadataRequest, +def test_query_audience_export_rest_required_fields( + request_type=analytics_data_api.QueryAudienceExportRequest, ): transport_class = transports.BetaAnalyticsDataRestTransport @@ -2486,7 +4596,7 @@ def test_get_metadata_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_metadata._get_unset_required_fields(jsonified_request) + ).query_audience_export._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2495,7 +4605,7 @@ def test_get_metadata_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_metadata._get_unset_required_fields(jsonified_request) + ).query_audience_export._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left 
alone @@ -2509,7 +4619,7 @@ def test_get_metadata_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.Metadata() + return_value = analytics_data_api.QueryAudienceExportResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2521,39 +4631,42 @@ def test_get_metadata_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.Metadata.pb(return_value) + return_value = analytics_data_api.QueryAudienceExportResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_metadata(request) + response = client.query_audience_export(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_metadata_rest_unset_required_fields(): +def test_query_audience_export_rest_unset_required_fields(): transport = transports.BetaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_metadata._get_unset_required_fields({}) + unset_fields = transport.query_audience_export._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_metadata_rest_interceptors(null_interceptor): +def 
test_query_audience_export_rest_interceptors(null_interceptor): transport = transports.BetaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2566,14 +4679,14 @@ def test_get_metadata_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_get_metadata" + transports.BetaAnalyticsDataRestInterceptor, "post_query_audience_export" ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_get_metadata" + transports.BetaAnalyticsDataRestInterceptor, "pre_query_audience_export" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetMetadataRequest.pb( - analytics_data_api.GetMetadataRequest() + pb_message = analytics_data_api.QueryAudienceExportRequest.pb( + analytics_data_api.QueryAudienceExportRequest() ) transcode.return_value = { "method": "post", @@ -2585,19 +4698,21 @@ def test_get_metadata_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.Metadata.to_json( - analytics_data_api.Metadata() + req.return_value._content = ( + analytics_data_api.QueryAudienceExportResponse.to_json( + analytics_data_api.QueryAudienceExportResponse() + ) ) - request = analytics_data_api.GetMetadataRequest() + request = analytics_data_api.QueryAudienceExportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.Metadata() + post.return_value = analytics_data_api.QueryAudienceExportResponse() - client.get_metadata( + client.query_audience_export( request, metadata=[ ("key", "val"), @@ -2609,8 +4724,8 @@ def test_get_metadata_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_get_metadata_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.GetMetadataRequest +def test_query_audience_export_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.QueryAudienceExportRequest ): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2618,7 +4733,7 @@ def test_get_metadata_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/metadata"} + request_init = {"name": "properties/sample1/audienceExports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2630,10 +4745,10 @@ def test_get_metadata_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_metadata(request) + client.query_audience_export(request) -def test_get_metadata_rest_flattened(): +def test_query_audience_export_rest_flattened(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2642,10 +4757,10 @@ def test_get_metadata_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.Metadata() + return_value = analytics_data_api.QueryAudienceExportResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/metadata"} + sample_request = {"name": "properties/sample1/audienceExports/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -2657,23 +4772,25 @@ def test_get_metadata_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.Metadata.pb(return_value) + return_value = analytics_data_api.QueryAudienceExportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_metadata(**mock_args) + client.query_audience_export(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=properties/*/metadata}" % client.transport._host, args[1] + "%s/v1beta/{name=properties/*/audienceExports/*}:query" + % client.transport._host, + args[1], ) -def test_get_metadata_rest_flattened_error(transport: str = "rest"): +def test_query_audience_export_rest_flattened_error(transport: str = "rest"): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2682,13 +4799,13 @@ def test_get_metadata_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_metadata( - analytics_data_api.GetMetadataRequest(), + client.query_audience_export( + analytics_data_api.QueryAudienceExportRequest(), name="name_value", ) -def test_get_metadata_rest_error(): +def test_query_audience_export_rest_error(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2697,47 +4814,146 @@ def test_get_metadata_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.RunRealtimeReportRequest, + analytics_data_api.GetAudienceExportRequest, dict, ], ) -def test_run_realtime_report_rest(request_type): +def test_get_audience_export_rest(request_type): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"name": "properties/sample1/audienceExports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.RunRealtimeReportResponse( + return_value = analytics_data_api.AudienceExport( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceExport.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, - kind="kind_value", + error_message="error_message_value", + percentage_completed=0.2106, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RunRealtimeReportResponse.pb(return_value) + return_value = analytics_data_api.AudienceExport.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.run_realtime_report(request) + response = client.get_audience_export(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RunRealtimeReportResponse) + assert isinstance(response, analytics_data_api.AudienceExport) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceExport.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 - assert response.kind == "kind_value" + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + + +def test_get_audience_export_rest_required_fields( + request_type=analytics_data_api.GetAudienceExportRequest, +): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_audience_export._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an 
appropriate value for the returned response. + return_value = analytics_data_api.AudienceExport() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceExport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_audience_export(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_audience_export_rest_unset_required_fields(): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_audience_export._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_realtime_report_rest_interceptors(null_interceptor): +def test_get_audience_export_rest_interceptors(null_interceptor): transport = transports.BetaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2750,14 +4966,14 @@ def 
test_run_realtime_report_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_run_realtime_report" + transports.BetaAnalyticsDataRestInterceptor, "post_get_audience_export" ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_run_realtime_report" + transports.BetaAnalyticsDataRestInterceptor, "pre_get_audience_export" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.RunRealtimeReportRequest.pb( - analytics_data_api.RunRealtimeReportRequest() + pb_message = analytics_data_api.GetAudienceExportRequest.pb( + analytics_data_api.GetAudienceExportRequest() ) transcode.return_value = { "method": "post", @@ -2769,57 +4985,111 @@ def test_run_realtime_report_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.RunRealtimeReportResponse.to_json( - analytics_data_api.RunRealtimeReportResponse() - ) + req.return_value._content = analytics_data_api.AudienceExport.to_json( + analytics_data_api.AudienceExport() ) - request = analytics_data_api.RunRealtimeReportRequest() + request = analytics_data_api.GetAudienceExportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RunRealtimeReportResponse() + post.return_value = analytics_data_api.AudienceExport() + + client.get_audience_export( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_audience_export_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.GetAudienceExportRequest +): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/audienceExports/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_audience_export(request) + + +def test_get_audience_export_rest_flattened(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.AudienceExport() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/audienceExports/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.AudienceExport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.run_realtime_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + client.get_audience_export(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=properties/*/audienceExports/*}" % client.transport._host, + args[1], + ) -def test_run_realtime_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunRealtimeReportRequest -): +def test_get_audience_export_rest_flattened_error(transport: str = "rest"): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_realtime_report(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_audience_export( + analytics_data_api.GetAudienceExportRequest(), + name="name_value", + ) -def test_run_realtime_report_rest_error(): +def test_get_audience_export_rest_error(): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2828,42 +5098,149 @@ def test_run_realtime_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CheckCompatibilityRequest, + analytics_data_api.ListAudienceExportsRequest, dict, ], ) -def test_check_compatibility_rest(request_type): +def test_list_audience_exports_rest(request_type): client = BetaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.CheckCompatibilityResponse() + return_value = analytics_data_api.ListAudienceExportsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.CheckCompatibilityResponse.pb(return_value) + return_value = analytics_data_api.ListAudienceExportsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.check_compatibility(request) + response = client.list_audience_exports(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.CheckCompatibilityResponse) + assert isinstance(response, pagers.ListAudienceExportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_audience_exports_rest_required_fields( + request_type=analytics_data_api.ListAudienceExportsRequest, +): + transport_class = transports.BetaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_audience_exports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_audience_exports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListAudienceExportsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceExportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_audience_exports(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_audience_exports_rest_unset_required_fields(): + transport = transports.BetaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_audience_exports._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_check_compatibility_rest_interceptors(null_interceptor): +def test_list_audience_exports_rest_interceptors(null_interceptor): transport = transports.BetaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2876,14 +5253,14 @@ def test_check_compatibility_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "post_check_compatibility" + transports.BetaAnalyticsDataRestInterceptor, "post_list_audience_exports" ) as post, mock.patch.object( - transports.BetaAnalyticsDataRestInterceptor, "pre_check_compatibility" + transports.BetaAnalyticsDataRestInterceptor, "pre_list_audience_exports" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CheckCompatibilityRequest.pb( - analytics_data_api.CheckCompatibilityRequest() + pb_message = analytics_data_api.ListAudienceExportsRequest.pb( + analytics_data_api.ListAudienceExportsRequest() ) transcode.return_value = { "method": "post", @@ -2896,20 +5273,20 @@ def test_check_compatibility_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_data_api.CheckCompatibilityResponse.to_json( - analytics_data_api.CheckCompatibilityResponse() + analytics_data_api.ListAudienceExportsResponse.to_json( + analytics_data_api.ListAudienceExportsResponse() ) ) - request = analytics_data_api.CheckCompatibilityRequest() + request = analytics_data_api.ListAudienceExportsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.CheckCompatibilityResponse() + post.return_value = analytics_data_api.ListAudienceExportsResponse() - client.check_compatibility( + client.list_audience_exports( request, metadata=[ ("key", "val"), @@ -2921,8 +5298,8 @@ def test_check_compatibility_rest_interceptors(null_interceptor): post.assert_called_once() -def test_check_compatibility_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.CheckCompatibilityRequest +def test_list_audience_exports_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.ListAudienceExportsRequest ): client = BetaAnalyticsDataClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -2930,7 +5307,7 @@ def test_check_compatibility_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2942,14 +5319,127 @@ def test_check_compatibility_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.check_compatibility(request) + client.list_audience_exports(request) -def test_check_compatibility_rest_error(): +def test_list_audience_exports_rest_flattened(): client = BetaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.ListAudienceExportsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.ListAudienceExportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_audience_exports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=properties/*}/audienceExports" % client.transport._host, + args[1], + ) + + +def test_list_audience_exports_rest_flattened_error(transport: str = "rest"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_audience_exports( + analytics_data_api.ListAudienceExportsRequest(), + parent="parent_value", + ) + + +def test_list_audience_exports_rest_pager(transport: str = "rest"): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceExportsResponse( + audience_exports=[ + analytics_data_api.AudienceExport(), + analytics_data_api.AudienceExport(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListAudienceExportsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_audience_exports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceExport) for i in results) + + pages = list(client.list_audience_exports(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
@@ -3097,6 +5587,10 @@ def test_beta_analytics_data_base_transport(): "get_metadata", "run_realtime_report", "check_compatibility", + "create_audience_export", + "query_audience_export", + "get_audience_export", + "list_audience_exports", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3105,6 +5599,11 @@ def test_beta_analytics_data_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ "kind", @@ -3306,6 +5805,23 @@ def test_beta_analytics_data_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) +def test_beta_analytics_data_rest_lro_client(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ @@ -3390,6 +5906,18 @@ def test_beta_analytics_data_client_transport_session_collision(transport_name): session1 = client1.transport.check_compatibility._session session2 = client2.transport.check_compatibility._session assert session1 != session2 + session1 = client1.transport.create_audience_export._session + session2 = client2.transport.create_audience_export._session + assert session1 != session2 + session1 = client1.transport.query_audience_export._session + session2 = client2.transport.query_audience_export._session + assert session1 != session2 + session1 = client1.transport.get_audience_export._session + session2 = client2.transport.get_audience_export._session + assert session1 != session2 + session1 = client1.transport.list_audience_exports._session + session2 = client2.transport.list_audience_exports._session + assert session1 != session2 def test_beta_analytics_data_grpc_transport_channel(): @@ -3518,8 +6046,65 @@ def test_beta_analytics_data_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_metadata_path(): +def test_beta_analytics_data_grpc_lro_client(): + client = BetaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_beta_analytics_data_grpc_lro_async_client(): + client = BetaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_audience_export_path(): property = "squid" + audience_export = "clam" + expected = "properties/{property}/audienceExports/{audience_export}".format( + property=property, + audience_export=audience_export, + ) + actual = BetaAnalyticsDataClient.audience_export_path(property, audience_export) + assert expected == actual + + +def test_parse_audience_export_path(): + expected = { + "property": "whelk", + "audience_export": "octopus", + } + path = BetaAnalyticsDataClient.audience_export_path(**expected) + + # Check that the path construction is reversible. + actual = BetaAnalyticsDataClient.parse_audience_export_path(path) + assert expected == actual + + +def test_metadata_path(): + property = "oyster" expected = "properties/{property}/metadata".format( property=property, ) @@ -3529,7 +6114,7 @@ def test_metadata_path(): def test_parse_metadata_path(): expected = { - "property": "clam", + "property": "nudibranch", } path = BetaAnalyticsDataClient.metadata_path(**expected) @@ -3539,7 +6124,7 @@ def test_parse_metadata_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3549,7 +6134,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = BetaAnalyticsDataClient.common_billing_account_path(**expected) @@ -3559,7 +6144,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -3569,7 +6154,7 @@ def test_common_folder_path(): def 
test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = BetaAnalyticsDataClient.common_folder_path(**expected) @@ -3579,7 +6164,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -3589,7 +6174,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = BetaAnalyticsDataClient.common_organization_path(**expected) @@ -3599,7 +6184,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -3609,7 +6194,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = BetaAnalyticsDataClient.common_project_path(**expected) @@ -3619,8 +6204,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3631,8 +6216,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "oyster", + "location": "nudibranch", } path = BetaAnalyticsDataClient.common_location_path(**expected) diff --git a/packages/google-apps-meet/.OwlBot.yaml b/packages/google-apps-meet/.OwlBot.yaml new file mode 100644 index 000000000000..59a80ef89e64 --- /dev/null +++ b/packages/google-apps-meet/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/apps/meet/(v.*)/.*-py + dest: /owl-bot-staging/google-apps-meet/$1 +api-name: google-apps-meet diff --git a/packages/google-apps-meet/.coveragerc b/packages/google-apps-meet/.coveragerc new file mode 100644 index 000000000000..290aec60f0e6 --- /dev/null +++ b/packages/google-apps-meet/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/apps/meet/__init__.py + google/apps/meet/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-apps-meet/.flake8 b/packages/google-apps-meet/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-apps-meet/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. 
+ **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-apps-meet/.gitignore b/packages/google-apps-meet/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-apps-meet/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-apps-meet/.repo-metadata.json b/packages/google-apps-meet/.repo-metadata.json new file mode 100644 index 000000000000..f41d481b40a0 --- /dev/null +++ b/packages/google-apps-meet/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-apps-meet", + "name_pretty": "Google Meet API", + "api_description": "Create and manage meetings in Google Meet.", + "product_documentation": "https://developers.google.com/meet/api/guides/overview", + "client_documentation": "https://googleapis.dev/python/google-apps-meet/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1216362&template=1766418", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-apps-meet", + "api_id": "meet.googleapis.com", + "default_version": "v2beta", + "codeowner_team": "", + "api_shortname": "meet" +} diff --git a/packages/google-apps-meet/CHANGELOG.md b/packages/google-apps-meet/CHANGELOG.md new file mode 100644 index 000000000000..239f5316247e --- /dev/null +++ b/packages/google-apps-meet/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-apps-meet-v0.1.0...google-apps-meet-v0.1.1) (2024-01-04) + + +### Features + +* [google-apps-meet] added start and end time fields to Recording and Transcript resources ([#12130](https://github.com/googleapis/google-cloud-python/issues/12130)) ([6679d16](https://github.com/googleapis/google-cloud-python/commit/6679d16e0fa93219c62ccbec2641dc68fbd7265b)) + +## 0.1.0 (2023-12-07) + + +### Features + +* add initial files for google.apps.meet.v2beta ([#12100](https://github.com/googleapis/google-cloud-python/issues/12100)) ([d99f5b0](https://github.com/googleapis/google-cloud-python/commit/d99f5b0ec5dcaa254bfa30dbf0495063a7a82374)) + +## Changelog diff --git a/packages/google-apps-meet/CODE_OF_CONDUCT.md 
b/packages/google-apps-meet/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-apps-meet/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. 
They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-apps-meet/CONTRIBUTING.rst b/packages/google-apps-meet/CONTRIBUTING.rst new file mode 100644 index 000000000000..31df2453a561 --- /dev/null +++ b/packages/google-apps-meet/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+ +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. 
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. 
If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-apps-meet + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-apps-meet/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. 
_Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-apps-meet/LICENSE b/packages/google-apps-meet/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-apps-meet/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-apps-meet/MANIFEST.in b/packages/google-apps-meet/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-apps-meet/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-apps-meet/README.rst b/packages/google-apps-meet/README.rst new file mode 100644 index 000000000000..f8d80407f4e1 --- /dev/null +++ b/packages/google-apps-meet/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Meet API +================================= + +|preview| |pypi| |versions| + +`Google Meet API`_: Create and manage meetings in Google Meet. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-apps-meet.svg + :target: https://pypi.org/project/google-apps-meet/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-apps-meet.svg + :target: https://pypi.org/project/google-apps-meet/ +.. _Google Meet API: https://developers.google.com/meet/api/guides/overview +.. _Client Library Documentation: https://googleapis.dev/python/google-apps-meet/latest +.. _Product Documentation: https://developers.google.com/meet/api/guides/overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Meet API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Meet API.: https://developers.google.com/meet/api/guides/overview +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-apps-meet + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-apps-meet + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Meet API + to see other available methods on the client. +- Read the `Google Meet API Product documentation`_ to learn + more about the product and see How-to Guides. 
+- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Meet API Product documentation: https://developers.google.com/meet/api/guides/overview +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-apps-meet/docs/CHANGELOG.md b/packages/google-apps-meet/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-apps-meet/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-apps-meet/docs/README.rst b/packages/google-apps-meet/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-apps-meet/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-apps-meet/docs/_static/custom.css b/packages/google-apps-meet/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-apps-meet/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-apps-meet/docs/_templates/layout.html b/packages/google-apps-meet/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-apps-meet/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-apps-meet/docs/conf.py b/packages/google-apps-meet/docs/conf.py new file mode 100644 index 000000000000..cfa92fb8d428 --- /dev/null +++ b/packages/google-apps-meet/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-apps-meet documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-apps-meet" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-apps-meet", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". 
+# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-apps-meet-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-apps-meet.tex", + "google-apps-meet Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-apps-meet", + "google-apps-meet Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-apps-meet", + "google-apps-meet Documentation", + author, + "google-apps-meet", + "google-apps-meet Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-apps-meet/docs/index.rst b/packages/google-apps-meet/docs/index.rst new file mode 100644 index 000000000000..e8e59615e6c8 --- /dev/null +++ b/packages/google-apps-meet/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + meet_v2beta/services_ + meet_v2beta/types_ + + +Changelog +--------- + +For a list of all ``google-apps-meet`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-apps-meet/docs/meet_v2beta/conference_records_service.rst b/packages/google-apps-meet/docs/meet_v2beta/conference_records_service.rst new file mode 100644 index 000000000000..686a355da5cb --- /dev/null +++ b/packages/google-apps-meet/docs/meet_v2beta/conference_records_service.rst @@ -0,0 +1,10 @@ +ConferenceRecordsService +------------------------------------------ + +.. 
automodule:: google.apps.meet_v2beta.services.conference_records_service + :members: + :inherited-members: + +.. automodule:: google.apps.meet_v2beta.services.conference_records_service.pagers + :members: + :inherited-members: diff --git a/packages/google-apps-meet/docs/meet_v2beta/services_.rst b/packages/google-apps-meet/docs/meet_v2beta/services_.rst new file mode 100644 index 000000000000..d64aa084b6c5 --- /dev/null +++ b/packages/google-apps-meet/docs/meet_v2beta/services_.rst @@ -0,0 +1,7 @@ +Services for Google Apps Meet v2beta API +======================================== +.. toctree:: + :maxdepth: 2 + + conference_records_service + spaces_service diff --git a/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst b/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst new file mode 100644 index 000000000000..3e799ddd3d66 --- /dev/null +++ b/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst @@ -0,0 +1,6 @@ +SpacesService +------------------------------- + +.. automodule:: google.apps.meet_v2beta.services.spaces_service + :members: + :inherited-members: diff --git a/packages/google-apps-meet/docs/meet_v2beta/types_.rst b/packages/google-apps-meet/docs/meet_v2beta/types_.rst new file mode 100644 index 000000000000..524ba9eec204 --- /dev/null +++ b/packages/google-apps-meet/docs/meet_v2beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Apps Meet v2beta API +===================================== + +.. automodule:: google.apps.meet_v2beta.types + :members: + :show-inheritance: diff --git a/packages/google-apps-meet/docs/multiprocessing.rst b/packages/google-apps-meet/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-apps-meet/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-apps-meet/google/apps/meet/__init__.py b/packages/google-apps-meet/google/apps/meet/__init__.py new file mode 100644 index 000000000000..2769401e90ca --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet/__init__.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.apps.meet import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.apps.meet_v2beta.services.conference_records_service.async_client import ( + ConferenceRecordsServiceAsyncClient, +) +from google.apps.meet_v2beta.services.conference_records_service.client import ( + ConferenceRecordsServiceClient, +) +from google.apps.meet_v2beta.services.spaces_service.async_client import ( + SpacesServiceAsyncClient, +) +from google.apps.meet_v2beta.services.spaces_service.client import SpacesServiceClient +from google.apps.meet_v2beta.types.resource import ( + ActiveConference, + AnonymousUser, + ConferenceRecord, + DocsDestination, + DriveDestination, + Participant, + ParticipantSession, + PhoneUser, + Recording, + SignedinUser, + Space, + SpaceConfig, + Transcript, + TranscriptEntry, +) +from google.apps.meet_v2beta.types.service import ( + CreateSpaceRequest, + EndActiveConferenceRequest, + GetConferenceRecordRequest, + GetParticipantRequest, + GetParticipantSessionRequest, + GetRecordingRequest, + GetSpaceRequest, + GetTranscriptEntryRequest, + GetTranscriptRequest, + ListConferenceRecordsRequest, + ListConferenceRecordsResponse, + ListParticipantSessionsRequest, + ListParticipantSessionsResponse, + ListParticipantsRequest, + ListParticipantsResponse, + ListRecordingsRequest, + ListRecordingsResponse, + ListTranscriptEntriesRequest, + ListTranscriptEntriesResponse, + ListTranscriptsRequest, + ListTranscriptsResponse, + UpdateSpaceRequest, +) + +__all__ = ( + "ConferenceRecordsServiceClient", + "ConferenceRecordsServiceAsyncClient", + "SpacesServiceClient", + "SpacesServiceAsyncClient", + "ActiveConference", + "AnonymousUser", + "ConferenceRecord", + "DocsDestination", + "DriveDestination", + "Participant", + "ParticipantSession", + "PhoneUser", + "Recording", + "SignedinUser", + "Space", + "SpaceConfig", + "Transcript", + "TranscriptEntry", + "CreateSpaceRequest", + "EndActiveConferenceRequest", + 
"GetConferenceRecordRequest", + "GetParticipantRequest", + "GetParticipantSessionRequest", + "GetRecordingRequest", + "GetSpaceRequest", + "GetTranscriptEntryRequest", + "GetTranscriptRequest", + "ListConferenceRecordsRequest", + "ListConferenceRecordsResponse", + "ListParticipantSessionsRequest", + "ListParticipantSessionsResponse", + "ListParticipantsRequest", + "ListParticipantsResponse", + "ListRecordingsRequest", + "ListRecordingsResponse", + "ListTranscriptEntriesRequest", + "ListTranscriptEntriesResponse", + "ListTranscriptsRequest", + "ListTranscriptsResponse", + "UpdateSpaceRequest", +) diff --git a/packages/google-apps-meet/google/apps/meet/gapic_version.py b/packages/google-apps-meet/google/apps/meet/gapic_version.py new file mode 100644 index 000000000000..123d60293175 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet/py.typed b/packages/google-apps-meet/google/apps/meet/py.typed new file mode 100644 index 000000000000..6bbef0e04410 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-apps-meet package uses inline types. 
diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py new file mode 100644 index 000000000000..4acf3b15228e --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.apps.meet_v2beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.conference_records_service import ( + ConferenceRecordsServiceAsyncClient, + ConferenceRecordsServiceClient, +) +from .services.spaces_service import SpacesServiceAsyncClient, SpacesServiceClient +from .types.resource import ( + ActiveConference, + AnonymousUser, + ConferenceRecord, + DocsDestination, + DriveDestination, + Participant, + ParticipantSession, + PhoneUser, + Recording, + SignedinUser, + Space, + SpaceConfig, + Transcript, + TranscriptEntry, +) +from .types.service import ( + CreateSpaceRequest, + EndActiveConferenceRequest, + GetConferenceRecordRequest, + GetParticipantRequest, + GetParticipantSessionRequest, + GetRecordingRequest, + GetSpaceRequest, + GetTranscriptEntryRequest, + GetTranscriptRequest, + ListConferenceRecordsRequest, + ListConferenceRecordsResponse, + ListParticipantSessionsRequest, + ListParticipantSessionsResponse, + ListParticipantsRequest, + ListParticipantsResponse, + ListRecordingsRequest, + 
ListRecordingsResponse, + ListTranscriptEntriesRequest, + ListTranscriptEntriesResponse, + ListTranscriptsRequest, + ListTranscriptsResponse, + UpdateSpaceRequest, +) + +__all__ = ( + "ConferenceRecordsServiceAsyncClient", + "SpacesServiceAsyncClient", + "ActiveConference", + "AnonymousUser", + "ConferenceRecord", + "ConferenceRecordsServiceClient", + "CreateSpaceRequest", + "DocsDestination", + "DriveDestination", + "EndActiveConferenceRequest", + "GetConferenceRecordRequest", + "GetParticipantRequest", + "GetParticipantSessionRequest", + "GetRecordingRequest", + "GetSpaceRequest", + "GetTranscriptEntryRequest", + "GetTranscriptRequest", + "ListConferenceRecordsRequest", + "ListConferenceRecordsResponse", + "ListParticipantSessionsRequest", + "ListParticipantSessionsResponse", + "ListParticipantsRequest", + "ListParticipantsResponse", + "ListRecordingsRequest", + "ListRecordingsResponse", + "ListTranscriptEntriesRequest", + "ListTranscriptEntriesResponse", + "ListTranscriptsRequest", + "ListTranscriptsResponse", + "Participant", + "ParticipantSession", + "PhoneUser", + "Recording", + "SignedinUser", + "Space", + "SpaceConfig", + "SpacesServiceClient", + "Transcript", + "TranscriptEntry", + "UpdateSpaceRequest", +) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json new file mode 100644 index 000000000000..07fcccd295b7 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json @@ -0,0 +1,287 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.apps.meet_v2beta", + "protoPackage": "google.apps.meet.v2beta", + "schema": "1.0", + "services": { + "ConferenceRecordsService": { + "clients": { + "grpc": { + "libraryClient": "ConferenceRecordsServiceClient", + "rpcs": { + "GetConferenceRecord": { + "methods": [ + "get_conference_record" + ] 
+ }, + "GetParticipant": { + "methods": [ + "get_participant" + ] + }, + "GetParticipantSession": { + "methods": [ + "get_participant_session" + ] + }, + "GetRecording": { + "methods": [ + "get_recording" + ] + }, + "GetTranscript": { + "methods": [ + "get_transcript" + ] + }, + "GetTranscriptEntry": { + "methods": [ + "get_transcript_entry" + ] + }, + "ListConferenceRecords": { + "methods": [ + "list_conference_records" + ] + }, + "ListParticipantSessions": { + "methods": [ + "list_participant_sessions" + ] + }, + "ListParticipants": { + "methods": [ + "list_participants" + ] + }, + "ListRecordings": { + "methods": [ + "list_recordings" + ] + }, + "ListTranscriptEntries": { + "methods": [ + "list_transcript_entries" + ] + }, + "ListTranscripts": { + "methods": [ + "list_transcripts" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConferenceRecordsServiceAsyncClient", + "rpcs": { + "GetConferenceRecord": { + "methods": [ + "get_conference_record" + ] + }, + "GetParticipant": { + "methods": [ + "get_participant" + ] + }, + "GetParticipantSession": { + "methods": [ + "get_participant_session" + ] + }, + "GetRecording": { + "methods": [ + "get_recording" + ] + }, + "GetTranscript": { + "methods": [ + "get_transcript" + ] + }, + "GetTranscriptEntry": { + "methods": [ + "get_transcript_entry" + ] + }, + "ListConferenceRecords": { + "methods": [ + "list_conference_records" + ] + }, + "ListParticipantSessions": { + "methods": [ + "list_participant_sessions" + ] + }, + "ListParticipants": { + "methods": [ + "list_participants" + ] + }, + "ListRecordings": { + "methods": [ + "list_recordings" + ] + }, + "ListTranscriptEntries": { + "methods": [ + "list_transcript_entries" + ] + }, + "ListTranscripts": { + "methods": [ + "list_transcripts" + ] + } + } + }, + "rest": { + "libraryClient": "ConferenceRecordsServiceClient", + "rpcs": { + "GetConferenceRecord": { + "methods": [ + "get_conference_record" + ] + }, + "GetParticipant": { + "methods": [ + "get_participant" + ] 
+ }, + "GetParticipantSession": { + "methods": [ + "get_participant_session" + ] + }, + "GetRecording": { + "methods": [ + "get_recording" + ] + }, + "GetTranscript": { + "methods": [ + "get_transcript" + ] + }, + "GetTranscriptEntry": { + "methods": [ + "get_transcript_entry" + ] + }, + "ListConferenceRecords": { + "methods": [ + "list_conference_records" + ] + }, + "ListParticipantSessions": { + "methods": [ + "list_participant_sessions" + ] + }, + "ListParticipants": { + "methods": [ + "list_participants" + ] + }, + "ListRecordings": { + "methods": [ + "list_recordings" + ] + }, + "ListTranscriptEntries": { + "methods": [ + "list_transcript_entries" + ] + }, + "ListTranscripts": { + "methods": [ + "list_transcripts" + ] + } + } + } + } + }, + "SpacesService": { + "clients": { + "grpc": { + "libraryClient": "SpacesServiceClient", + "rpcs": { + "CreateSpace": { + "methods": [ + "create_space" + ] + }, + "EndActiveConference": { + "methods": [ + "end_active_conference" + ] + }, + "GetSpace": { + "methods": [ + "get_space" + ] + }, + "UpdateSpace": { + "methods": [ + "update_space" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SpacesServiceAsyncClient", + "rpcs": { + "CreateSpace": { + "methods": [ + "create_space" + ] + }, + "EndActiveConference": { + "methods": [ + "end_active_conference" + ] + }, + "GetSpace": { + "methods": [ + "get_space" + ] + }, + "UpdateSpace": { + "methods": [ + "update_space" + ] + } + } + }, + "rest": { + "libraryClient": "SpacesServiceClient", + "rpcs": { + "CreateSpace": { + "methods": [ + "create_space" + ] + }, + "EndActiveConference": { + "methods": [ + "end_active_conference" + ] + }, + "GetSpace": { + "methods": [ + "get_space" + ] + }, + "UpdateSpace": { + "methods": [ + "update_space" + ] + } + } + } + } + } + } +} diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py new file mode 100644 index 000000000000..123d60293175 --- 
/dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/py.typed b/packages/google-apps-meet/google/apps/meet_v2beta/py.typed new file mode 100644 index 000000000000..6bbef0e04410 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-apps-meet package uses inline types. diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/__init__.py new file mode 100644 index 000000000000..86d7a0ae95f1 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ConferenceRecordsServiceAsyncClient +from .client import ConferenceRecordsServiceClient + +__all__ = ( + "ConferenceRecordsServiceClient", + "ConferenceRecordsServiceAsyncClient", +) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py new file mode 100644 index 000000000000..e19d47b5a540 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py @@ -0,0 +1,1696 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.apps.meet_v2beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.apps.meet_v2beta.services.conference_records_service import pagers +from google.apps.meet_v2beta.types import resource, service + +from .client import ConferenceRecordsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ConferenceRecordsServiceTransport +from .transports.grpc_asyncio import ConferenceRecordsServiceGrpcAsyncIOTransport + + +class ConferenceRecordsServiceAsyncClient: + """REST API for services dealing with conference records.""" + + _client: ConferenceRecordsServiceClient + + DEFAULT_ENDPOINT = ConferenceRecordsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConferenceRecordsServiceClient.DEFAULT_MTLS_ENDPOINT + + conference_record_path = staticmethod( + 
ConferenceRecordsServiceClient.conference_record_path + ) + parse_conference_record_path = staticmethod( + ConferenceRecordsServiceClient.parse_conference_record_path + ) + participant_path = staticmethod(ConferenceRecordsServiceClient.participant_path) + parse_participant_path = staticmethod( + ConferenceRecordsServiceClient.parse_participant_path + ) + participant_session_path = staticmethod( + ConferenceRecordsServiceClient.participant_session_path + ) + parse_participant_session_path = staticmethod( + ConferenceRecordsServiceClient.parse_participant_session_path + ) + recording_path = staticmethod(ConferenceRecordsServiceClient.recording_path) + parse_recording_path = staticmethod( + ConferenceRecordsServiceClient.parse_recording_path + ) + space_path = staticmethod(ConferenceRecordsServiceClient.space_path) + parse_space_path = staticmethod(ConferenceRecordsServiceClient.parse_space_path) + transcript_path = staticmethod(ConferenceRecordsServiceClient.transcript_path) + parse_transcript_path = staticmethod( + ConferenceRecordsServiceClient.parse_transcript_path + ) + transcript_entry_path = staticmethod( + ConferenceRecordsServiceClient.transcript_entry_path + ) + parse_transcript_entry_path = staticmethod( + ConferenceRecordsServiceClient.parse_transcript_entry_path + ) + common_billing_account_path = staticmethod( + ConferenceRecordsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ConferenceRecordsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ConferenceRecordsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ConferenceRecordsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ConferenceRecordsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ConferenceRecordsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + 
ConferenceRecordsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + ConferenceRecordsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + ConferenceRecordsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + ConferenceRecordsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConferenceRecordsServiceAsyncClient: The constructed client. + """ + return ConferenceRecordsServiceClient.from_service_account_info.__func__(ConferenceRecordsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConferenceRecordsServiceAsyncClient: The constructed client. + """ + return ConferenceRecordsServiceClient.from_service_account_file.__func__(ConferenceRecordsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ConferenceRecordsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ConferenceRecordsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ConferenceRecordsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ConferenceRecordsServiceClient).get_transport_class, + type(ConferenceRecordsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ConferenceRecordsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the conference records service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConferenceRecordsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ConferenceRecordsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_conference_record( + self, + request: Optional[Union[service.GetConferenceRecordRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.ConferenceRecord: + r"""`Developer + Preview `__. + Gets a conference record by conference ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_conference_record(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetConferenceRecordRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conference_record(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetConferenceRecordRequest, dict]]): + The request object. Request to get a conference record. + name (:class:`str`): + Required. Resource name of the + conference. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.ConferenceRecord: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Single instance of a meeting held in a space. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetConferenceRecordRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_conference_record, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_conference_records( + self, + request: Optional[Union[service.ListConferenceRecordsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConferenceRecordsAsyncPager: + r"""`Developer + Preview `__. + Lists the conference records by start time and in descending + order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_conference_records(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListConferenceRecordsRequest( + ) + + # Make the request + page_result = client.list_conference_records(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListConferenceRecordsRequest, dict]]): + The request object. Request to fetch list of conference + records per user. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListConferenceRecordsAsyncPager: + Response of ListConferenceRecords + method. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = service.ListConferenceRecordsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_conference_records, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConferenceRecordsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_participant( + self, + request: Optional[Union[service.GetParticipantRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Participant: + r"""`Developer + Preview `__. + Gets a participant by participant ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_participant(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantRequest( + name="name_value", + ) + + # Make the request + response = await client.get_participant(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetParticipantRequest, dict]]): + The request object. Request to get a Participant. + name (:class:`str`): + Required. Resource name of the + participant. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Participant: + [Developer Preview](\ https://developers.google.com/workspace/preview). + User who attended or is attending a conference. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetParticipantRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_participant, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_participants( + self, + request: Optional[Union[service.ListParticipantsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListParticipantsAsyncPager: + r"""`Developer + Preview `__. + Lists the participants in a conference record, by default + ordered by join time and in descending order. This API supports + ``fields`` as standard parameters like every other API. However, + when the ``fields`` request parameter is omitted, this API + defaults to ``'participants/*, next_page_token'``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_participants(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participants(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListParticipantsRequest, dict]]): + The request object. Request to fetch list of participant + per conference. + parent (:class:`str`): + Required. Format: + ``conferenceRecords/{conference_record}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantsAsyncPager: + Response of ListParticipants method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.ListParticipantsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_participants, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListParticipantsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_participant_session( + self, + request: Optional[Union[service.GetParticipantSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.ParticipantSession: + r"""`Developer + Preview `__. + Gets a participant session by participant session ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_participant_session(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_participant_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetParticipantSessionRequest, dict]]): + The request object. Request to get a participant session. + name (:class:`str`): + Required. Resource name of the + participant. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.ParticipantSession: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Refers to each unique join/leave session when a user + joins a conference from a device. Note that any time + a user joins the conference a new unique ID is + assigned. That means if a user joins a space multiple + times from the same device, they're assigned + different IDs, and are also be treated as different + participant sessions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetParticipantSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_participant_session, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_participant_sessions( + self, + request: Optional[Union[service.ListParticipantSessionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListParticipantSessionsAsyncPager: + r"""`Developer + Preview `__. + Lists the participant sessions of a participant in a conference + record, by default ordered by join time and in descending order. + This API supports ``fields`` as standard parameters like every + other API. However, when the ``fields`` request parameter is + omitted this API defaults to + ``'participantsessions/*, next_page_token'``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_participant_sessions(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participant_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListParticipantSessionsRequest, dict]]): + The request object. Request to fetch list of participant + sessions per conference record per + participant. + parent (:class:`str`): + Required. Format: + ``conferenceRecords/{conference_record}/participants/{participant}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantSessionsAsyncPager: + Response of ListParticipants method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListParticipantSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_participant_sessions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListParticipantSessionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_recording( + self, + request: Optional[Union[service.GetRecordingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Recording: + r"""`Developer + Preview `__. + Gets a recording by recording ID. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_recording(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetRecordingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_recording(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetRecordingRequest, dict]]): + The request object. Request message for GetRecording + method. + name (:class:`str`): + Required. Resource name of the + recording. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Recording: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Metadata about a recording created during a + conference. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetRecordingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_recording, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_recordings( + self, + request: Optional[Union[service.ListRecordingsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRecordingsAsyncPager: + r"""`Developer + Preview `__. + Lists the recording resources from the conference record. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_recordings(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListRecordingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recordings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListRecordingsRequest, dict]]): + The request object. Request for ListRecordings method. + parent (:class:`str`): + Required. Format: + ``conferenceRecords/{conference_record}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListRecordingsAsyncPager: + Response for ListRecordings method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.ListRecordingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_recordings, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRecordingsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_transcript( + self, + request: Optional[Union[service.GetTranscriptRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Transcript: + r"""`Developer + Preview `__. + Gets a transcript by transcript ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_transcript(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transcript(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetTranscriptRequest, dict]]): + The request object. Request for GetTranscript method. + name (:class:`str`): + Required. Resource name of the + transcript. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Transcript: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Metadata for a transcript generated from a + conference. It refers to the ASR (Automatic Speech + Recognition) result of user's speech during the + conference. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.GetTranscriptRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transcript, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_transcripts( + self, + request: Optional[Union[service.ListTranscriptsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTranscriptsAsyncPager: + r"""`Developer + Preview `__. + Lists the set of transcripts from the conference record. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_transcripts(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcripts(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListTranscriptsRequest, dict]]): + The request object. Request for ListTranscripts method. + parent (:class:`str`): + Required. Format: + ``conferenceRecords/{conference_record}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptsAsyncPager: + Response for ListTranscripts method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.ListTranscriptsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transcripts, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTranscriptsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_transcript_entry( + self, + request: Optional[Union[service.GetTranscriptEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.TranscriptEntry: + r"""`Developer + Preview `__. + Gets a ``TranscriptEntry`` resource by entry ID. + + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_transcript_entry(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transcript_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetTranscriptEntryRequest, dict]]): + The request object. Request for GetTranscriptEntry + method. + name (:class:`str`): + Required. Resource name of the ``TranscriptEntry``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.TranscriptEntry: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Single entry for one user’s speech during a + transcript session. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetTranscriptEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transcript_entry, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_transcript_entries( + self, + request: Optional[Union[service.ListTranscriptEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTranscriptEntriesAsyncPager: + r"""`Developer + Preview `__. + Lists the structured transcript entries per transcript. By + default, ordered by start time and in ascending order. + + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_transcript_entries(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcript_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListTranscriptEntriesRequest, dict]]): + The request object. Request for ListTranscriptEntries + method. + parent (:class:`str`): + Required. Format: + ``conferenceRecords/{conference_record}/transcripts/{transcript}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptEntriesAsyncPager: + Response for ListTranscriptEntries + method + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListTranscriptEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transcript_entries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTranscriptEntriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ConferenceRecordsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConferenceRecordsServiceAsyncClient",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py new file mode 100644 index 000000000000..cb4c9c4143e4 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py @@ -0,0 +1,1906 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.apps.meet_v2beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + +from google.apps.meet_v2beta.services.conference_records_service import pagers +from google.apps.meet_v2beta.types import resource, service + +from .transports.base import DEFAULT_CLIENT_INFO, ConferenceRecordsServiceTransport +from .transports.grpc import ConferenceRecordsServiceGrpcTransport +from .transports.grpc_asyncio import ConferenceRecordsServiceGrpcAsyncIOTransport +from .transports.rest import ConferenceRecordsServiceRestTransport + + +class ConferenceRecordsServiceClientMeta(type): + """Metaclass for the ConferenceRecordsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ConferenceRecordsServiceTransport]] + _transport_registry["grpc"] = ConferenceRecordsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ConferenceRecordsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ConferenceRecordsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ConferenceRecordsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ConferenceRecordsServiceClient(metaclass=ConferenceRecordsServiceClientMeta): + """REST API for services dealing with conference records.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "meet.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConferenceRecordsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConferenceRecordsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConferenceRecordsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ConferenceRecordsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def conference_record_path( + conference_record: str, + ) -> str: + """Returns a fully-qualified conference_record string.""" + return "conferenceRecords/{conference_record}".format( + conference_record=conference_record, + ) + + @staticmethod + def parse_conference_record_path(path: str) -> Dict[str, str]: + """Parses a conference_record path into its component segments.""" + m = re.match(r"^conferenceRecords/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def participant_path( + conference_record: str, + participant: str, + ) -> str: + """Returns a fully-qualified participant string.""" + return ( + "conferenceRecords/{conference_record}/participants/{participant}".format( + conference_record=conference_record, + participant=participant, + ) + ) + + @staticmethod + def parse_participant_path(path: str) -> Dict[str, str]: + """Parses a participant path into its component segments.""" + m = re.match( + r"^conferenceRecords/(?P.+?)/participants/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def participant_session_path( + conference_record: str, + participant: str, + participant_session: str, + ) -> str: + """Returns a fully-qualified participant_session string.""" + return "conferenceRecords/{conference_record}/participants/{participant}/participantSessions/{participant_session}".format( + conference_record=conference_record, + participant=participant, + participant_session=participant_session, + ) + + @staticmethod + def parse_participant_session_path(path: str) -> Dict[str, str]: + """Parses a participant_session path into its component segments.""" + m = re.match( + r"^conferenceRecords/(?P.+?)/participants/(?P.+?)/participantSessions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def recording_path( + conference_record: str, + recording: str, + ) -> str: + """Returns a fully-qualified recording string.""" + return 
"conferenceRecords/{conference_record}/recordings/{recording}".format( + conference_record=conference_record, + recording=recording, + ) + + @staticmethod + def parse_recording_path(path: str) -> Dict[str, str]: + """Parses a recording path into its component segments.""" + m = re.match( + r"^conferenceRecords/(?P.+?)/recordings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def space_path( + space: str, + ) -> str: + """Returns a fully-qualified space string.""" + return "spaces/{space}".format( + space=space, + ) + + @staticmethod + def parse_space_path(path: str) -> Dict[str, str]: + """Parses a space path into its component segments.""" + m = re.match(r"^spaces/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def transcript_path( + conference_record: str, + transcript: str, + ) -> str: + """Returns a fully-qualified transcript string.""" + return "conferenceRecords/{conference_record}/transcripts/{transcript}".format( + conference_record=conference_record, + transcript=transcript, + ) + + @staticmethod + def parse_transcript_path(path: str) -> Dict[str, str]: + """Parses a transcript path into its component segments.""" + m = re.match( + r"^conferenceRecords/(?P.+?)/transcripts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transcript_entry_path( + conference_record: str, + transcript: str, + entry: str, + ) -> str: + """Returns a fully-qualified transcript_entry string.""" + return "conferenceRecords/{conference_record}/transcripts/{transcript}/entries/{entry}".format( + conference_record=conference_record, + transcript=transcript, + entry=entry, + ) + + @staticmethod + def parse_transcript_entry_path(path: str) -> Dict[str, str]: + """Parses a transcript_entry path into its component segments.""" + m = re.match( + r"^conferenceRecords/(?P.+?)/transcripts/(?P.+?)/entries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def 
common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConferenceRecordsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the conference records service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ConferenceRecordsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConferenceRecordsServiceTransport): + # transport is a ConferenceRecordsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_conference_record( + self, + request: Optional[Union[service.GetConferenceRecordRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.ConferenceRecord: + r"""`Developer + Preview `__. + Gets a conference record by conference ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_conference_record(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetConferenceRecordRequest( + name="name_value", + ) + + # Make the request + response = client.get_conference_record(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetConferenceRecordRequest, dict]): + The request object. Request to get a conference record. + name (str): + Required. Resource name of the + conference. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.ConferenceRecord: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Single instance of a meeting held in a space. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetConferenceRecordRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service.GetConferenceRecordRequest): + request = service.GetConferenceRecordRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_conference_record] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_conference_records( + self, + request: Optional[Union[service.ListConferenceRecordsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConferenceRecordsPager: + r"""`Developer + Preview `__. + Lists the conference records by start time and in descending + order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_conference_records(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListConferenceRecordsRequest( + ) + + # Make the request + page_result = client.list_conference_records(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListConferenceRecordsRequest, dict]): + The request object. Request to fetch list of conference + records per user. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListConferenceRecordsPager: + Response of ListConferenceRecords + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.ListConferenceRecordsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListConferenceRecordsRequest): + request = service.ListConferenceRecordsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_conference_records] + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConferenceRecordsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_participant( + self, + request: Optional[Union[service.GetParticipantRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Participant: + r"""`Developer + Preview `__. + Gets a participant by participant ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_participant(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantRequest( + name="name_value", + ) + + # Make the request + response = client.get_participant(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetParticipantRequest, dict]): + The request object. Request to get a Participant. + name (str): + Required. Resource name of the + participant. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Participant: + [Developer Preview](\ https://developers.google.com/workspace/preview). + User who attended or is attending a conference. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetParticipantRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetParticipantRequest): + request = service.GetParticipantRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_participant] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_participants( + self, + request: Optional[Union[service.ListParticipantsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListParticipantsPager: + r"""`Developer + Preview `__. + Lists the participants in a conference record, by default + ordered by join time and in descending order. This API supports + ``fields`` as standard parameters like every other API. However, + when the ``fields`` request parameter is omitted, this API + defaults to ``'participants/*, next_page_token'``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_participants(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participants(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListParticipantsRequest, dict]): + The request object. Request to fetch list of participant + per conference. + parent (str): + Required. Format: + ``conferenceRecords/{conference_record}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantsPager: + Response of ListParticipants method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListParticipantsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListParticipantsRequest): + request = service.ListParticipantsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_participants] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListParticipantsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_participant_session( + self, + request: Optional[Union[service.GetParticipantSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.ParticipantSession: + r"""`Developer + Preview `__. + Gets a participant session by participant session ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_participant_session(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_participant_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetParticipantSessionRequest, dict]): + The request object. Request to get a participant session. + name (str): + Required. Resource name of the + participant. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.ParticipantSession: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Refers to each unique join/leave session when a user + joins a conference from a device. Note that any time + a user joins the conference a new unique ID is + assigned. That means if a user joins a space multiple + times from the same device, they're assigned + different IDs, and are also be treated as different + participant sessions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetParticipantSessionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetParticipantSessionRequest): + request = service.GetParticipantSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_participant_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_participant_sessions( + self, + request: Optional[Union[service.ListParticipantSessionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListParticipantSessionsPager: + r"""`Developer + Preview `__. + Lists the participant sessions of a participant in a conference + record, by default ordered by join time and in descending order. + This API supports ``fields`` as standard parameters like every + other API. However, when the ``fields`` request parameter is + omitted this API defaults to + ``'participantsessions/*, next_page_token'``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_participant_sessions(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participant_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListParticipantSessionsRequest, dict]): + The request object. Request to fetch list of participant + sessions per conference record per + participant. + parent (str): + Required. 
Format: + ``conferenceRecords/{conference_record}/participants/{participant}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantSessionsPager: + Response of ListParticipants method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListParticipantSessionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListParticipantSessionsRequest): + request = service.ListParticipantSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_participant_sessions + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListParticipantSessionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_recording( + self, + request: Optional[Union[service.GetRecordingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Recording: + r"""`Developer + Preview `__. + Gets a recording by recording ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_recording(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetRecordingRequest( + name="name_value", + ) + + # Make the request + response = client.get_recording(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetRecordingRequest, dict]): + The request object. Request message for GetRecording + method. + name (str): + Required. Resource name of the + recording. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Recording: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Metadata about a recording created during a + conference. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetRecordingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetRecordingRequest): + request = service.GetRecordingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_recording] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_recordings( + self, + request: Optional[Union[service.ListRecordingsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRecordingsPager: + r"""`Developer + Preview `__. + Lists the recording resources from the conference record. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_recordings(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListRecordingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recordings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListRecordingsRequest, dict]): + The request object. Request for ListRecordings method. + parent (str): + Required. Format: + ``conferenceRecords/{conference_record}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListRecordingsPager: + Response for ListRecordings method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListRecordingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListRecordingsRequest): + request = service.ListRecordingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_recordings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRecordingsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_transcript( + self, + request: Optional[Union[service.GetTranscriptRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Transcript: + r"""`Developer + Preview `__. + Gets a transcript by transcript ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_transcript(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptRequest( + name="name_value", + ) + + # Make the request + response = client.get_transcript(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetTranscriptRequest, dict]): + The request object. Request for GetTranscript method. + name (str): + Required. Resource name of the + transcript. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Transcript: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Metadata for a transcript generated from a + conference. 
It refers to the ASR (Automatic Speech + Recognition) result of user's speech during the + conference. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetTranscriptRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetTranscriptRequest): + request = service.GetTranscriptRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_transcript] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_transcripts( + self, + request: Optional[Union[service.ListTranscriptsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTranscriptsPager: + r"""`Developer + Preview `__. + Lists the set of transcripts from the conference record. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_transcripts(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcripts(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListTranscriptsRequest, dict]): + The request object. Request for ListTranscripts method. + parent (str): + Required. Format: + ``conferenceRecords/{conference_record}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptsPager: + Response for ListTranscripts method. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListTranscriptsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListTranscriptsRequest): + request = service.ListTranscriptsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transcripts] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTranscriptsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_transcript_entry( + self, + request: Optional[Union[service.GetTranscriptEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.TranscriptEntry: + r"""`Developer + Preview `__. + Gets a ``TranscriptEntry`` resource by entry ID. 
+ + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_transcript_entry(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_transcript_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetTranscriptEntryRequest, dict]): + The request object. Request for GetTranscriptEntry + method. + name (str): + Required. Resource name of the ``TranscriptEntry``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.TranscriptEntry: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Single entry for one user’s speech during a + transcript session. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetTranscriptEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetTranscriptEntryRequest): + request = service.GetTranscriptEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_transcript_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_transcript_entries( + self, + request: Optional[Union[service.ListTranscriptEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTranscriptEntriesPager: + r"""`Developer + Preview `__. + Lists the structured transcript entries per transcript. By + default, ordered by start time and in ascending order. + + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. 
This can occur when the Google Docs transcript + file is modified after generation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_transcript_entries(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcript_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListTranscriptEntriesRequest, dict]): + The request object. Request for ListTranscriptEntries + method. + parent (str): + Required. Format: + ``conferenceRecords/{conference_record}/transcripts/{transcript}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptEntriesPager: + Response for ListTranscriptEntries + method + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListTranscriptEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListTranscriptEntriesRequest): + request = service.ListTranscriptEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transcript_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTranscriptEntriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ConferenceRecordsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConferenceRecordsServiceClient",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/pagers.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/pagers.py new file mode 100644 index 000000000000..1e8be9a578bc --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/pagers.py @@ -0,0 +1,795 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.apps.meet_v2beta.types import resource, service + + +class ListConferenceRecordsPager: + """A pager for iterating through ``list_conference_records`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListConferenceRecordsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``conference_records`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConferenceRecords`` requests and continue to iterate + through the ``conference_records`` field on the + corresponding responses. 
+ + All the usual :class:`google.apps.meet_v2beta.types.ListConferenceRecordsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListConferenceRecordsResponse], + request: service.ListConferenceRecordsRequest, + response: service.ListConferenceRecordsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListConferenceRecordsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListConferenceRecordsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListConferenceRecordsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListConferenceRecordsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resource.ConferenceRecord]: + for page in self.pages: + yield from page.conference_records + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConferenceRecordsAsyncPager: + """A pager for iterating through ``list_conference_records`` requests. 
+ + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListConferenceRecordsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``conference_records`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConferenceRecords`` requests and continue to iterate + through the ``conference_records`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListConferenceRecordsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListConferenceRecordsResponse]], + request: service.ListConferenceRecordsRequest, + response: service.ListConferenceRecordsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListConferenceRecordsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListConferenceRecordsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListConferenceRecordsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListConferenceRecordsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.ConferenceRecord]: + async def async_generator(): + async for page in self.pages: + for response in page.conference_records: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListParticipantsPager: + """A pager for iterating through ``list_participants`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListParticipantsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``participants`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListParticipants`` requests and continue to iterate + through the ``participants`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListParticipantsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListParticipantsResponse], + request: service.ListParticipantsRequest, + response: service.ListParticipantsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.apps.meet_v2beta.types.ListParticipantsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListParticipantsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListParticipantsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListParticipantsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resource.Participant]: + for page in self.pages: + yield from page.participants + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListParticipantsAsyncPager: + """A pager for iterating through ``list_participants`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListParticipantsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``participants`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListParticipants`` requests and continue to iterate + through the ``participants`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListParticipantsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListParticipantsResponse]], + request: service.ListParticipantsRequest, + response: service.ListParticipantsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListParticipantsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListParticipantsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListParticipantsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListParticipantsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.Participant]: + async def async_generator(): + async for page in self.pages: + for response in page.participants: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListParticipantSessionsPager: + """A pager for iterating through ``list_participant_sessions`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListParticipantSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``participant_sessions`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListParticipantSessions`` requests and continue to iterate + through the ``participant_sessions`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListParticipantSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListParticipantSessionsResponse], + request: service.ListParticipantSessionsRequest, + response: service.ListParticipantSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListParticipantSessionsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListParticipantSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListParticipantSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListParticipantSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resource.ParticipantSession]: + for page in self.pages: + yield from page.participant_sessions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListParticipantSessionsAsyncPager: + """A pager for iterating through ``list_participant_sessions`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListParticipantSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``participant_sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListParticipantSessions`` requests and continue to iterate + through the ``participant_sessions`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListParticipantSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListParticipantSessionsResponse]], + request: service.ListParticipantSessionsRequest, + response: service.ListParticipantSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.apps.meet_v2beta.types.ListParticipantSessionsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListParticipantSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListParticipantSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListParticipantSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.ParticipantSession]: + async def async_generator(): + async for page in self.pages: + for response in page.participant_sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRecordingsPager: + """A pager for iterating through ``list_recordings`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListRecordingsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``recordings`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRecordings`` requests and continue to iterate + through the ``recordings`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListRecordingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., service.ListRecordingsResponse], + request: service.ListRecordingsRequest, + response: service.ListRecordingsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListRecordingsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListRecordingsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListRecordingsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListRecordingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resource.Recording]: + for page in self.pages: + yield from page.recordings + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRecordingsAsyncPager: + """A pager for iterating through ``list_recordings`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListRecordingsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``recordings`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRecordings`` requests and continue to iterate + through the ``recordings`` field on the + corresponding responses. 
+ + All the usual :class:`google.apps.meet_v2beta.types.ListRecordingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListRecordingsResponse]], + request: service.ListRecordingsRequest, + response: service.ListRecordingsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListRecordingsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListRecordingsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListRecordingsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListRecordingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.Recording]: + async def async_generator(): + async for page in self.pages: + for response in page.recordings: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTranscriptsPager: + """A pager for iterating through ``list_transcripts`` requests. 
+ + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListTranscriptsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transcripts`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTranscripts`` requests and continue to iterate + through the ``transcripts`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListTranscriptsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListTranscriptsResponse], + request: service.ListTranscriptsRequest, + response: service.ListTranscriptsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListTranscriptsRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListTranscriptsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListTranscriptsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListTranscriptsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resource.Transcript]: + for page in self.pages: + yield from page.transcripts + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTranscriptsAsyncPager: + """A pager for iterating through ``list_transcripts`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListTranscriptsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transcripts`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTranscripts`` requests and continue to iterate + through the ``transcripts`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListTranscriptsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListTranscriptsResponse]], + request: service.ListTranscriptsRequest, + response: service.ListTranscriptsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListTranscriptsRequest): + The initial request object. 
+ response (google.apps.meet_v2beta.types.ListTranscriptsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListTranscriptsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListTranscriptsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.Transcript]: + async def async_generator(): + async for page in self.pages: + for response in page.transcripts: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTranscriptEntriesPager: + """A pager for iterating through ``list_transcript_entries`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListTranscriptEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transcript_entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTranscriptEntries`` requests and continue to iterate + through the ``transcript_entries`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListTranscriptEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., service.ListTranscriptEntriesResponse], + request: service.ListTranscriptEntriesRequest, + response: service.ListTranscriptEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListTranscriptEntriesRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListTranscriptEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListTranscriptEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListTranscriptEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resource.TranscriptEntry]: + for page in self.pages: + yield from page.transcript_entries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTranscriptEntriesAsyncPager: + """A pager for iterating through ``list_transcript_entries`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListTranscriptEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transcript_entries`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListTranscriptEntries`` requests and continue to iterate + through the ``transcript_entries`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListTranscriptEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListTranscriptEntriesResponse]], + request: service.ListTranscriptEntriesRequest, + response: service.ListTranscriptEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListTranscriptEntriesRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListTranscriptEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListTranscriptEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListTranscriptEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.TranscriptEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.transcript_entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/__init__.py new file mode 100644 index 000000000000..f81d2b6fd59b --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConferenceRecordsServiceTransport +from .grpc import ConferenceRecordsServiceGrpcTransport +from .grpc_asyncio import ConferenceRecordsServiceGrpcAsyncIOTransport +from .rest import ( + ConferenceRecordsServiceRestInterceptor, + ConferenceRecordsServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ConferenceRecordsServiceTransport]] +_transport_registry["grpc"] = ConferenceRecordsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ConferenceRecordsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ConferenceRecordsServiceRestTransport + +__all__ = ( + "ConferenceRecordsServiceTransport", + "ConferenceRecordsServiceGrpcTransport", + "ConferenceRecordsServiceGrpcAsyncIOTransport", + "ConferenceRecordsServiceRestTransport", + "ConferenceRecordsServiceRestInterceptor", +) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py new file mode 100644 index 000000000000..7bc4cbef4d69 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py @@ -0,0 +1,433 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.apps.meet_v2beta import gapic_version as package_version +from google.apps.meet_v2beta.types import resource, service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ConferenceRecordsServiceTransport(abc.ABC): + """Abstract transport class for ConferenceRecordsService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "meet.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get_conference_record: gapic_v1.method.wrap_method( + self.get_conference_record, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_conference_records: gapic_v1.method.wrap_method( + self.list_conference_records, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_participant: gapic_v1.method.wrap_method( + self.get_participant, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_participants: gapic_v1.method.wrap_method( + self.list_participants, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_participant_session: gapic_v1.method.wrap_method( + self.get_participant_session, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_participant_sessions: gapic_v1.method.wrap_method( + self.list_participant_sessions, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + 
self.get_recording: gapic_v1.method.wrap_method( + self.get_recording, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_recordings: gapic_v1.method.wrap_method( + self.list_recordings, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_transcript: gapic_v1.method.wrap_method( + self.get_transcript, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_transcripts: gapic_v1.method.wrap_method( + self.list_transcripts, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_transcript_entry: gapic_v1.method.wrap_method( + self.get_transcript_entry, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_transcript_entries: gapic_v1.method.wrap_method( + self.list_transcript_entries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. 
+ + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_conference_record( + self, + ) -> Callable[ + [service.GetConferenceRecordRequest], + Union[resource.ConferenceRecord, Awaitable[resource.ConferenceRecord]], + ]: + raise NotImplementedError() + + @property + def list_conference_records( + self, + ) -> Callable[ + [service.ListConferenceRecordsRequest], + Union[ + service.ListConferenceRecordsResponse, + Awaitable[service.ListConferenceRecordsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_participant( + self, + ) -> Callable[ + [service.GetParticipantRequest], + Union[resource.Participant, Awaitable[resource.Participant]], + ]: + raise NotImplementedError() + + @property + def list_participants( + self, + ) -> Callable[ + [service.ListParticipantsRequest], + Union[ + service.ListParticipantsResponse, + Awaitable[service.ListParticipantsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_participant_session( + self, + ) -> Callable[ + [service.GetParticipantSessionRequest], + Union[resource.ParticipantSession, Awaitable[resource.ParticipantSession]], + ]: + raise NotImplementedError() + + @property + def list_participant_sessions( + self, + ) -> Callable[ + [service.ListParticipantSessionsRequest], + Union[ + service.ListParticipantSessionsResponse, + Awaitable[service.ListParticipantSessionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_recording( + self, + ) -> Callable[ + [service.GetRecordingRequest], + Union[resource.Recording, Awaitable[resource.Recording]], + ]: + raise NotImplementedError() + + @property + def list_recordings( + self, + ) -> Callable[ + [service.ListRecordingsRequest], + Union[ + service.ListRecordingsResponse, Awaitable[service.ListRecordingsResponse] + ], + ]: + raise NotImplementedError() + + @property + def 
get_transcript( + self, + ) -> Callable[ + [service.GetTranscriptRequest], + Union[resource.Transcript, Awaitable[resource.Transcript]], + ]: + raise NotImplementedError() + + @property + def list_transcripts( + self, + ) -> Callable[ + [service.ListTranscriptsRequest], + Union[ + service.ListTranscriptsResponse, Awaitable[service.ListTranscriptsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_transcript_entry( + self, + ) -> Callable[ + [service.GetTranscriptEntryRequest], + Union[resource.TranscriptEntry, Awaitable[resource.TranscriptEntry]], + ]: + raise NotImplementedError() + + @property + def list_transcript_entries( + self, + ) -> Callable[ + [service.ListTranscriptEntriesRequest], + Union[ + service.ListTranscriptEntriesResponse, + Awaitable[service.ListTranscriptEntriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ConferenceRecordsServiceTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py new file mode 100644 index 000000000000..e5c4f7fc1d45 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py @@ -0,0 +1,603 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .base import DEFAULT_CLIENT_INFO, ConferenceRecordsServiceTransport + + +class ConferenceRecordsServiceGrpcTransport(ConferenceRecordsServiceTransport): + """gRPC backend transport for ConferenceRecordsService. + + REST API for services dealing with conference records. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_conference_record( + self, + ) -> Callable[[service.GetConferenceRecordRequest], resource.ConferenceRecord]: + r"""Return a callable for the get conference record method over gRPC. + + `Developer + Preview `__. + Gets a conference record by conference ID. + + Returns: + Callable[[~.GetConferenceRecordRequest], + ~.ConferenceRecord]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_conference_record" not in self._stubs: + self._stubs["get_conference_record"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetConferenceRecord", + request_serializer=service.GetConferenceRecordRequest.serialize, + response_deserializer=resource.ConferenceRecord.deserialize, + ) + return self._stubs["get_conference_record"] + + @property + def list_conference_records( + self, + ) -> Callable[ + [service.ListConferenceRecordsRequest], service.ListConferenceRecordsResponse + ]: + r"""Return a callable for the list conference records method over gRPC. + + `Developer + Preview `__. + Lists the conference records by start time and in descending + order. + + Returns: + Callable[[~.ListConferenceRecordsRequest], + ~.ListConferenceRecordsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_conference_records" not in self._stubs: + self._stubs["list_conference_records"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListConferenceRecords", + request_serializer=service.ListConferenceRecordsRequest.serialize, + response_deserializer=service.ListConferenceRecordsResponse.deserialize, + ) + return self._stubs["list_conference_records"] + + @property + def get_participant( + self, + ) -> Callable[[service.GetParticipantRequest], resource.Participant]: + r"""Return a callable for the get participant method over gRPC. + + `Developer + Preview `__. + Gets a participant by participant ID. + + Returns: + Callable[[~.GetParticipantRequest], + ~.Participant]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_participant" not in self._stubs: + self._stubs["get_participant"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetParticipant", + request_serializer=service.GetParticipantRequest.serialize, + response_deserializer=resource.Participant.deserialize, + ) + return self._stubs["get_participant"] + + @property + def list_participants( + self, + ) -> Callable[[service.ListParticipantsRequest], service.ListParticipantsResponse]: + r"""Return a callable for the list participants method over gRPC. + + `Developer + Preview `__. + Lists the participants in a conference record, by default + ordered by join time and in descending order. This API supports + ``fields`` as standard parameters like every other API. However, + when the ``fields`` request parameter is omitted, this API + defaults to ``'participants/*, next_page_token'``. + + Returns: + Callable[[~.ListParticipantsRequest], + ~.ListParticipantsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_participants" not in self._stubs: + self._stubs["list_participants"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListParticipants", + request_serializer=service.ListParticipantsRequest.serialize, + response_deserializer=service.ListParticipantsResponse.deserialize, + ) + return self._stubs["list_participants"] + + @property + def get_participant_session( + self, + ) -> Callable[[service.GetParticipantSessionRequest], resource.ParticipantSession]: + r"""Return a callable for the get participant session method over gRPC. + + `Developer + Preview `__. 
+ Gets a participant session by participant session ID. + + Returns: + Callable[[~.GetParticipantSessionRequest], + ~.ParticipantSession]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_participant_session" not in self._stubs: + self._stubs["get_participant_session"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetParticipantSession", + request_serializer=service.GetParticipantSessionRequest.serialize, + response_deserializer=resource.ParticipantSession.deserialize, + ) + return self._stubs["get_participant_session"] + + @property + def list_participant_sessions( + self, + ) -> Callable[ + [service.ListParticipantSessionsRequest], + service.ListParticipantSessionsResponse, + ]: + r"""Return a callable for the list participant sessions method over gRPC. + + `Developer + Preview `__. + Lists the participant sessions of a participant in a conference + record, by default ordered by join time and in descending order. + This API supports ``fields`` as standard parameters like every + other API. However, when the ``fields`` request parameter is + omitted this API defaults to + ``'participantsessions/*, next_page_token'``. + + Returns: + Callable[[~.ListParticipantSessionsRequest], + ~.ListParticipantSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_participant_sessions" not in self._stubs: + self._stubs["list_participant_sessions"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListParticipantSessions", + request_serializer=service.ListParticipantSessionsRequest.serialize, + response_deserializer=service.ListParticipantSessionsResponse.deserialize, + ) + return self._stubs["list_participant_sessions"] + + @property + def get_recording( + self, + ) -> Callable[[service.GetRecordingRequest], resource.Recording]: + r"""Return a callable for the get recording method over gRPC. + + `Developer + Preview `__. + Gets a recording by recording ID. + + Returns: + Callable[[~.GetRecordingRequest], + ~.Recording]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_recording" not in self._stubs: + self._stubs["get_recording"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetRecording", + request_serializer=service.GetRecordingRequest.serialize, + response_deserializer=resource.Recording.deserialize, + ) + return self._stubs["get_recording"] + + @property + def list_recordings( + self, + ) -> Callable[[service.ListRecordingsRequest], service.ListRecordingsResponse]: + r"""Return a callable for the list recordings method over gRPC. + + `Developer + Preview `__. + Lists the recording resources from the conference record. + + Returns: + Callable[[~.ListRecordingsRequest], + ~.ListRecordingsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_recordings" not in self._stubs: + self._stubs["list_recordings"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListRecordings", + request_serializer=service.ListRecordingsRequest.serialize, + response_deserializer=service.ListRecordingsResponse.deserialize, + ) + return self._stubs["list_recordings"] + + @property + def get_transcript( + self, + ) -> Callable[[service.GetTranscriptRequest], resource.Transcript]: + r"""Return a callable for the get transcript method over gRPC. + + `Developer + Preview `__. + Gets a transcript by transcript ID. + + Returns: + Callable[[~.GetTranscriptRequest], + ~.Transcript]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transcript" not in self._stubs: + self._stubs["get_transcript"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetTranscript", + request_serializer=service.GetTranscriptRequest.serialize, + response_deserializer=resource.Transcript.deserialize, + ) + return self._stubs["get_transcript"] + + @property + def list_transcripts( + self, + ) -> Callable[[service.ListTranscriptsRequest], service.ListTranscriptsResponse]: + r"""Return a callable for the list transcripts method over gRPC. + + `Developer + Preview `__. + Lists the set of transcripts from the conference record. + + Returns: + Callable[[~.ListTranscriptsRequest], + ~.ListTranscriptsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_transcripts" not in self._stubs: + self._stubs["list_transcripts"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListTranscripts", + request_serializer=service.ListTranscriptsRequest.serialize, + response_deserializer=service.ListTranscriptsResponse.deserialize, + ) + return self._stubs["list_transcripts"] + + @property + def get_transcript_entry( + self, + ) -> Callable[[service.GetTranscriptEntryRequest], resource.TranscriptEntry]: + r"""Return a callable for the get transcript entry method over gRPC. + + `Developer + Preview `__. + Gets a ``TranscriptEntry`` resource by entry ID. + + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + Returns: + Callable[[~.GetTranscriptEntryRequest], + ~.TranscriptEntry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transcript_entry" not in self._stubs: + self._stubs["get_transcript_entry"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetTranscriptEntry", + request_serializer=service.GetTranscriptEntryRequest.serialize, + response_deserializer=resource.TranscriptEntry.deserialize, + ) + return self._stubs["get_transcript_entry"] + + @property + def list_transcript_entries( + self, + ) -> Callable[ + [service.ListTranscriptEntriesRequest], service.ListTranscriptEntriesResponse + ]: + r"""Return a callable for the list transcript entries method over gRPC. + + `Developer + Preview `__. + Lists the structured transcript entries per transcript. By + default, ordered by start time and in ascending order. 
+ + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + Returns: + Callable[[~.ListTranscriptEntriesRequest], + ~.ListTranscriptEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transcript_entries" not in self._stubs: + self._stubs["list_transcript_entries"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListTranscriptEntries", + request_serializer=service.ListTranscriptEntriesRequest.serialize, + response_deserializer=service.ListTranscriptEntriesResponse.deserialize, + ) + return self._stubs["list_transcript_entries"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ConferenceRecordsServiceGrpcTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c17c8c9c65ce --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py @@ -0,0 +1,616 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .base import DEFAULT_CLIENT_INFO, ConferenceRecordsServiceTransport +from .grpc import ConferenceRecordsServiceGrpcTransport + + +class ConferenceRecordsServiceGrpcAsyncIOTransport(ConferenceRecordsServiceTransport): + """gRPC AsyncIO backend transport for ConferenceRecordsService. + + REST API for services dealing with conference records. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_conference_record( + self, + ) -> Callable[ + [service.GetConferenceRecordRequest], Awaitable[resource.ConferenceRecord] + ]: + r"""Return a callable for the get conference record method over gRPC. + + `Developer + Preview `__. + Gets a conference record by conference ID. 
+ + Returns: + Callable[[~.GetConferenceRecordRequest], + Awaitable[~.ConferenceRecord]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_conference_record" not in self._stubs: + self._stubs["get_conference_record"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetConferenceRecord", + request_serializer=service.GetConferenceRecordRequest.serialize, + response_deserializer=resource.ConferenceRecord.deserialize, + ) + return self._stubs["get_conference_record"] + + @property + def list_conference_records( + self, + ) -> Callable[ + [service.ListConferenceRecordsRequest], + Awaitable[service.ListConferenceRecordsResponse], + ]: + r"""Return a callable for the list conference records method over gRPC. + + `Developer + Preview `__. + Lists the conference records by start time and in descending + order. + + Returns: + Callable[[~.ListConferenceRecordsRequest], + Awaitable[~.ListConferenceRecordsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_conference_records" not in self._stubs: + self._stubs["list_conference_records"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListConferenceRecords", + request_serializer=service.ListConferenceRecordsRequest.serialize, + response_deserializer=service.ListConferenceRecordsResponse.deserialize, + ) + return self._stubs["list_conference_records"] + + @property + def get_participant( + self, + ) -> Callable[[service.GetParticipantRequest], Awaitable[resource.Participant]]: + r"""Return a callable for the get participant method over gRPC. + + `Developer + Preview `__. + Gets a participant by participant ID. + + Returns: + Callable[[~.GetParticipantRequest], + Awaitable[~.Participant]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_participant" not in self._stubs: + self._stubs["get_participant"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetParticipant", + request_serializer=service.GetParticipantRequest.serialize, + response_deserializer=resource.Participant.deserialize, + ) + return self._stubs["get_participant"] + + @property + def list_participants( + self, + ) -> Callable[ + [service.ListParticipantsRequest], Awaitable[service.ListParticipantsResponse] + ]: + r"""Return a callable for the list participants method over gRPC. + + `Developer + Preview `__. + Lists the participants in a conference record, by default + ordered by join time and in descending order. This API supports + ``fields`` as standard parameters like every other API. However, + when the ``fields`` request parameter is omitted, this API + defaults to ``'participants/*, next_page_token'``. 
+ + Returns: + Callable[[~.ListParticipantsRequest], + Awaitable[~.ListParticipantsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_participants" not in self._stubs: + self._stubs["list_participants"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListParticipants", + request_serializer=service.ListParticipantsRequest.serialize, + response_deserializer=service.ListParticipantsResponse.deserialize, + ) + return self._stubs["list_participants"] + + @property + def get_participant_session( + self, + ) -> Callable[ + [service.GetParticipantSessionRequest], Awaitable[resource.ParticipantSession] + ]: + r"""Return a callable for the get participant session method over gRPC. + + `Developer + Preview `__. + Gets a participant session by participant session ID. + + Returns: + Callable[[~.GetParticipantSessionRequest], + Awaitable[~.ParticipantSession]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_participant_session" not in self._stubs: + self._stubs["get_participant_session"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetParticipantSession", + request_serializer=service.GetParticipantSessionRequest.serialize, + response_deserializer=resource.ParticipantSession.deserialize, + ) + return self._stubs["get_participant_session"] + + @property + def list_participant_sessions( + self, + ) -> Callable[ + [service.ListParticipantSessionsRequest], + Awaitable[service.ListParticipantSessionsResponse], + ]: + r"""Return a callable for the list participant sessions method over gRPC. + + `Developer + Preview `__. + Lists the participant sessions of a participant in a conference + record, by default ordered by join time and in descending order. + This API supports ``fields`` as standard parameters like every + other API. However, when the ``fields`` request parameter is + omitted this API defaults to + ``'participantsessions/*, next_page_token'``. + + Returns: + Callable[[~.ListParticipantSessionsRequest], + Awaitable[~.ListParticipantSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_participant_sessions" not in self._stubs: + self._stubs["list_participant_sessions"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListParticipantSessions", + request_serializer=service.ListParticipantSessionsRequest.serialize, + response_deserializer=service.ListParticipantSessionsResponse.deserialize, + ) + return self._stubs["list_participant_sessions"] + + @property + def get_recording( + self, + ) -> Callable[[service.GetRecordingRequest], Awaitable[resource.Recording]]: + r"""Return a callable for the get recording method over gRPC. 
+ + `Developer + Preview `__. + Gets a recording by recording ID. + + Returns: + Callable[[~.GetRecordingRequest], + Awaitable[~.Recording]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_recording" not in self._stubs: + self._stubs["get_recording"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetRecording", + request_serializer=service.GetRecordingRequest.serialize, + response_deserializer=resource.Recording.deserialize, + ) + return self._stubs["get_recording"] + + @property + def list_recordings( + self, + ) -> Callable[ + [service.ListRecordingsRequest], Awaitable[service.ListRecordingsResponse] + ]: + r"""Return a callable for the list recordings method over gRPC. + + `Developer + Preview `__. + Lists the recording resources from the conference record. + + Returns: + Callable[[~.ListRecordingsRequest], + Awaitable[~.ListRecordingsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_recordings" not in self._stubs: + self._stubs["list_recordings"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListRecordings", + request_serializer=service.ListRecordingsRequest.serialize, + response_deserializer=service.ListRecordingsResponse.deserialize, + ) + return self._stubs["list_recordings"] + + @property + def get_transcript( + self, + ) -> Callable[[service.GetTranscriptRequest], Awaitable[resource.Transcript]]: + r"""Return a callable for the get transcript method over gRPC. + + `Developer + Preview `__. 
+ Gets a transcript by transcript ID. + + Returns: + Callable[[~.GetTranscriptRequest], + Awaitable[~.Transcript]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transcript" not in self._stubs: + self._stubs["get_transcript"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetTranscript", + request_serializer=service.GetTranscriptRequest.serialize, + response_deserializer=resource.Transcript.deserialize, + ) + return self._stubs["get_transcript"] + + @property + def list_transcripts( + self, + ) -> Callable[ + [service.ListTranscriptsRequest], Awaitable[service.ListTranscriptsResponse] + ]: + r"""Return a callable for the list transcripts method over gRPC. + + `Developer + Preview `__. + Lists the set of transcripts from the conference record. + + Returns: + Callable[[~.ListTranscriptsRequest], + Awaitable[~.ListTranscriptsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transcripts" not in self._stubs: + self._stubs["list_transcripts"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListTranscripts", + request_serializer=service.ListTranscriptsRequest.serialize, + response_deserializer=service.ListTranscriptsResponse.deserialize, + ) + return self._stubs["list_transcripts"] + + @property + def get_transcript_entry( + self, + ) -> Callable[ + [service.GetTranscriptEntryRequest], Awaitable[resource.TranscriptEntry] + ]: + r"""Return a callable for the get transcript entry method over gRPC. 
+ + `Developer + Preview `__. + Gets a ``TranscriptEntry`` resource by entry ID. + + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + Returns: + Callable[[~.GetTranscriptEntryRequest], + Awaitable[~.TranscriptEntry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transcript_entry" not in self._stubs: + self._stubs["get_transcript_entry"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/GetTranscriptEntry", + request_serializer=service.GetTranscriptEntryRequest.serialize, + response_deserializer=resource.TranscriptEntry.deserialize, + ) + return self._stubs["get_transcript_entry"] + + @property + def list_transcript_entries( + self, + ) -> Callable[ + [service.ListTranscriptEntriesRequest], + Awaitable[service.ListTranscriptEntriesResponse], + ]: + r"""Return a callable for the list transcript entries method over gRPC. + + `Developer + Preview `__. + Lists the structured transcript entries per transcript. By + default, ordered by start time and in ascending order. + + Note: The transcript entries returned by the Google Meet API + might not match the transcription found in the Google Docs + transcript file. This can occur when the Google Docs transcript + file is modified after generation. + + Returns: + Callable[[~.ListTranscriptEntriesRequest], + Awaitable[~.ListTranscriptEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transcript_entries" not in self._stubs: + self._stubs["list_transcript_entries"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.ConferenceRecordsService/ListTranscriptEntries", + request_serializer=service.ListTranscriptEntriesRequest.serialize, + response_deserializer=service.ListTranscriptEntriesResponse.deserialize, + ) + return self._stubs["list_transcript_entries"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ConferenceRecordsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py new file mode 100644 index 000000000000..c4a66094ad03 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py @@ -0,0 +1,1718 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.apps.meet_v2beta.types import resource, service + +from .base import ConferenceRecordsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ConferenceRecordsServiceRestInterceptor: + """Interceptor for ConferenceRecordsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ConferenceRecordsServiceRestTransport. + + .. 
code-block:: python + class MyCustomConferenceRecordsServiceInterceptor(ConferenceRecordsServiceRestInterceptor): + def pre_get_conference_record(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_conference_record(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_participant(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_participant(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_participant_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_participant_session(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_recording(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_recording(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_transcript(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transcript(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_transcript_entry(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transcript_entry(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_conference_records(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_conference_records(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_participants(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_participants(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_list_participant_sessions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_participant_sessions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_recordings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_recordings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transcript_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transcript_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transcripts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transcripts(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ConferenceRecordsServiceRestTransport(interceptor=MyCustomConferenceRecordsServiceInterceptor()) + client = ConferenceRecordsServiceClient(transport=transport) + + + """ + + def pre_get_conference_record( + self, + request: service.GetConferenceRecordRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetConferenceRecordRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_conference_record + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_get_conference_record( + self, response: resource.ConferenceRecord + ) -> resource.ConferenceRecord: + """Post-rpc interceptor for get_conference_record + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_participant( + self, + request: service.GetParticipantRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetParticipantRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_participant + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_get_participant( + self, response: resource.Participant + ) -> resource.Participant: + """Post-rpc interceptor for get_participant + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_get_participant_session( + self, + request: service.GetParticipantSessionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetParticipantSessionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_participant_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_get_participant_session( + self, response: resource.ParticipantSession + ) -> resource.ParticipantSession: + """Post-rpc interceptor for get_participant_session + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_get_recording( + self, request: service.GetRecordingRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetRecordingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_recording + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. 
+ """ + return request, metadata + + def post_get_recording(self, response: resource.Recording) -> resource.Recording: + """Post-rpc interceptor for get_recording + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_get_transcript( + self, request: service.GetTranscriptRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetTranscriptRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_transcript + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_get_transcript(self, response: resource.Transcript) -> resource.Transcript: + """Post-rpc interceptor for get_transcript + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_get_transcript_entry( + self, + request: service.GetTranscriptEntryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.GetTranscriptEntryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_transcript_entry + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_get_transcript_entry( + self, response: resource.TranscriptEntry + ) -> resource.TranscriptEntry: + """Post-rpc interceptor for get_transcript_entry + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_conference_records( + self, + request: service.ListConferenceRecordsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListConferenceRecordsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_conference_records + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_list_conference_records( + self, response: service.ListConferenceRecordsResponse + ) -> service.ListConferenceRecordsResponse: + """Post-rpc interceptor for list_conference_records + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_list_participants( + self, + request: service.ListParticipantsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListParticipantsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_participants + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_list_participants( + self, response: service.ListParticipantsResponse + ) -> service.ListParticipantsResponse: + """Post-rpc interceptor for list_participants + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_list_participant_sessions( + self, + request: service.ListParticipantSessionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListParticipantSessionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_participant_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. 
+ """ + return request, metadata + + def post_list_participant_sessions( + self, response: service.ListParticipantSessionsResponse + ) -> service.ListParticipantSessionsResponse: + """Post-rpc interceptor for list_participant_sessions + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_list_recordings( + self, + request: service.ListRecordingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListRecordingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_recordings + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_list_recordings( + self, response: service.ListRecordingsResponse + ) -> service.ListRecordingsResponse: + """Post-rpc interceptor for list_recordings + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + def pre_list_transcript_entries( + self, + request: service.ListTranscriptEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListTranscriptEntriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transcript_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_list_transcript_entries( + self, response: service.ListTranscriptEntriesResponse + ) -> service.ListTranscriptEntriesResponse: + """Post-rpc interceptor for list_transcript_entries + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_transcripts( + self, + request: service.ListTranscriptsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.ListTranscriptsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transcripts + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConferenceRecordsService server. + """ + return request, metadata + + def post_list_transcripts( + self, response: service.ListTranscriptsResponse + ) -> service.ListTranscriptsResponse: + """Post-rpc interceptor for list_transcripts + + Override in a subclass to manipulate the response + after it is returned by the ConferenceRecordsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ConferenceRecordsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ConferenceRecordsServiceRestInterceptor + + +class ConferenceRecordsServiceRestTransport(ConferenceRecordsServiceTransport): + """REST backend transport for ConferenceRecordsService. + + REST API for services dealing with conference records. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ConferenceRecordsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ConferenceRecordsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetConferenceRecord(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("GetConferenceRecord") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetConferenceRecordRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.ConferenceRecord: + r"""Call the get conference record method over HTTP. + + Args: + request (~.service.GetConferenceRecordRequest): + The request object. 
Request to get a conference record. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.ConferenceRecord: + `Developer + Preview `__. + Single instance of a meeting held in a space. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=conferenceRecords/*}", + }, + ] + request, metadata = self._interceptor.pre_get_conference_record( + request, metadata + ) + pb_request = service.GetConferenceRecordRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.ConferenceRecord() + pb_resp = resource.ConferenceRecord.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_conference_record(resp) + return resp + + class _GetParticipant(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("GetParticipant") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetParticipantRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Participant: + r"""Call the get participant method over HTTP. + + Args: + request (~.service.GetParticipantRequest): + The request object. Request to get a Participant. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.Participant: + `Developer + Preview `__. + User who attended or is attending a conference. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=conferenceRecords/*/participants/*}", + }, + ] + request, metadata = self._interceptor.pre_get_participant(request, metadata) + pb_request = service.GetParticipantRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Participant() + pb_resp = resource.Participant.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_participant(resp) + return resp + + class _GetParticipantSession(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("GetParticipantSession") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetParticipantSessionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.ParticipantSession: + r"""Call the get participant session method over HTTP. + + Args: + request (~.service.GetParticipantSessionRequest): + The request object. Request to get a participant session. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.ParticipantSession: + `Developer + Preview `__. + Refers to each unique join/leave session when a user + joins a conference from a device. Note that any time a + user joins the conference a new unique ID is assigned. + That means if a user joins a space multiple times from + the same device, they're assigned different IDs, and are + also be treated as different participant sessions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=conferenceRecords/*/participants/*/participantSessions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_participant_session( + request, metadata + ) + pb_request = service.GetParticipantSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.ParticipantSession() + pb_resp = resource.ParticipantSession.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_participant_session(resp) + return resp + + class _GetRecording(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("GetRecording") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetRecordingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Recording: + r"""Call the get recording method over HTTP. + + Args: + request (~.service.GetRecordingRequest): + The request object. Request message for GetRecording + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.Recording: + `Developer + Preview `__. + Metadata about a recording created during a conference. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=conferenceRecords/*/recordings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_recording(request, metadata) + pb_request = service.GetRecordingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Recording() + pb_resp = resource.Recording.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_recording(resp) + return resp + + class _GetTranscript(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("GetTranscript") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetTranscriptRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Transcript: + r"""Call the get transcript method over HTTP. + + Args: + request (~.service.GetTranscriptRequest): + The request object. Request for GetTranscript method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.Transcript: + `Developer + Preview `__. + Metadata for a transcript generated from a conference. + It refers to the ASR (Automatic Speech Recognition) + result of user's speech during the conference. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=conferenceRecords/*/transcripts/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transcript(request, metadata) + pb_request = service.GetTranscriptRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Transcript() + pb_resp = resource.Transcript.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transcript(resp) + return resp + + class _GetTranscriptEntry(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("GetTranscriptEntry") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetTranscriptEntryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.TranscriptEntry: + r"""Call the get transcript entry method over HTTP. + + Args: + request (~.service.GetTranscriptEntryRequest): + The request object. Request for GetTranscriptEntry + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.TranscriptEntry: + `Developer + Preview `__. + Single entry for one user’s speech during a transcript + session. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=conferenceRecords/*/transcripts/*/entries/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transcript_entry( + request, metadata + ) + pb_request = service.GetTranscriptEntryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.TranscriptEntry() + pb_resp = resource.TranscriptEntry.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transcript_entry(resp) + return resp + + class _ListConferenceRecords(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("ListConferenceRecords") + + def __call__( + self, + request: service.ListConferenceRecordsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListConferenceRecordsResponse: + r"""Call the list conference records method over HTTP. 
+ + Args: + request (~.service.ListConferenceRecordsRequest): + The request object. Request to fetch list of conference + records per user. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListConferenceRecordsResponse: + Response of ListConferenceRecords + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/conferenceRecords", + }, + ] + request, metadata = self._interceptor.pre_list_conference_records( + request, metadata + ) + pb_request = service.ListConferenceRecordsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListConferenceRecordsResponse() + pb_resp = service.ListConferenceRecordsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_conference_records(resp) + return resp + + class _ListParticipants(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("ListParticipants") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListParticipantsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListParticipantsResponse: + r"""Call the list participants method over HTTP. + + Args: + request (~.service.ListParticipantsRequest): + The request object. Request to fetch list of participant + per conference. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListParticipantsResponse: + Response of ListParticipants method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{parent=conferenceRecords/*}/participants", + }, + ] + request, metadata = self._interceptor.pre_list_participants( + request, metadata + ) + pb_request = service.ListParticipantsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListParticipantsResponse() + pb_resp = service.ListParticipantsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_participants(resp) + return resp + + class _ListParticipantSessions(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("ListParticipantSessions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListParticipantSessionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListParticipantSessionsResponse: + r"""Call the list participant sessions method over HTTP. + + Args: + request (~.service.ListParticipantSessionsRequest): + The request object. Request to fetch list of participant + sessions per conference record per + participant. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListParticipantSessionsResponse: + Response of ListParticipants method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{parent=conferenceRecords/*/participants/*}/participantSessions", + }, + ] + request, metadata = self._interceptor.pre_list_participant_sessions( + request, metadata + ) + pb_request = service.ListParticipantSessionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListParticipantSessionsResponse() + pb_resp = service.ListParticipantSessionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_participant_sessions(resp) + return resp + + class _ListRecordings(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("ListRecordings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListRecordingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListRecordingsResponse: + r"""Call the list recordings method over HTTP. + + Args: + request (~.service.ListRecordingsRequest): + The request object. Request for ListRecordings method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListRecordingsResponse: + Response for ListRecordings method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{parent=conferenceRecords/*}/recordings", + }, + ] + request, metadata = self._interceptor.pre_list_recordings(request, metadata) + pb_request = service.ListRecordingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListRecordingsResponse() + pb_resp = service.ListRecordingsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_recordings(resp) + return resp + + class _ListTranscriptEntries(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("ListTranscriptEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListTranscriptEntriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListTranscriptEntriesResponse: + r"""Call the list transcript entries method over HTTP. + + Args: + request (~.service.ListTranscriptEntriesRequest): + The request object. Request for ListTranscriptEntries + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.service.ListTranscriptEntriesResponse: + Response for ListTranscriptEntries + method + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{parent=conferenceRecords/*/transcripts/*}/entries", + }, + ] + request, metadata = self._interceptor.pre_list_transcript_entries( + request, metadata + ) + pb_request = service.ListTranscriptEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListTranscriptEntriesResponse() + pb_resp = service.ListTranscriptEntriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transcript_entries(resp) + return resp + + class _ListTranscripts(ConferenceRecordsServiceRestStub): + def __hash__(self): + return hash("ListTranscripts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListTranscriptsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListTranscriptsResponse: + r"""Call the list transcripts method over HTTP. + + Args: + request (~.service.ListTranscriptsRequest): + The request object. Request for ListTranscripts method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListTranscriptsResponse: + Response for ListTranscripts method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{parent=conferenceRecords/*}/transcripts", + }, + ] + request, metadata = self._interceptor.pre_list_transcripts( + request, metadata + ) + pb_request = service.ListTranscriptsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListTranscriptsResponse() + pb_resp = service.ListTranscriptsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transcripts(resp) + return resp + + @property + def get_conference_record( + self, + ) -> Callable[[service.GetConferenceRecordRequest], resource.ConferenceRecord]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetConferenceRecord(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_participant( + self, + ) -> Callable[[service.GetParticipantRequest], resource.Participant]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetParticipant(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_participant_session( + self, + ) -> Callable[[service.GetParticipantSessionRequest], resource.ParticipantSession]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetParticipantSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_recording( + self, + ) -> Callable[[service.GetRecordingRequest], resource.Recording]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRecording(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transcript( + self, + ) -> Callable[[service.GetTranscriptRequest], resource.Transcript]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTranscript(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transcript_entry( + self, + ) -> Callable[[service.GetTranscriptEntryRequest], resource.TranscriptEntry]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetTranscriptEntry(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_conference_records( + self, + ) -> Callable[ + [service.ListConferenceRecordsRequest], service.ListConferenceRecordsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListConferenceRecords(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_participants( + self, + ) -> Callable[[service.ListParticipantsRequest], service.ListParticipantsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListParticipants(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_participant_sessions( + self, + ) -> Callable[ + [service.ListParticipantSessionsRequest], + service.ListParticipantSessionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListParticipantSessions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_recordings( + self, + ) -> Callable[[service.ListRecordingsRequest], service.ListRecordingsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRecordings(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transcript_entries( + self, + ) -> Callable[ + [service.ListTranscriptEntriesRequest], service.ListTranscriptEntriesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListTranscriptEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transcripts( + self, + ) -> Callable[[service.ListTranscriptsRequest], service.ListTranscriptsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTranscripts(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ConferenceRecordsServiceRestTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/__init__.py new file mode 100644 index 000000000000..497fa1835771 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SpacesServiceAsyncClient +from .client import SpacesServiceClient + +__all__ = ( + "SpacesServiceClient", + "SpacesServiceAsyncClient", +) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py new file mode 100644 index 000000000000..81e8e52ac953 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py @@ -0,0 +1,653 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.apps.meet_v2beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .client import SpacesServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SpacesServiceTransport +from .transports.grpc_asyncio import SpacesServiceGrpcAsyncIOTransport + + +class SpacesServiceAsyncClient: + """REST API for services dealing with spaces.""" + + _client: SpacesServiceClient + + DEFAULT_ENDPOINT = SpacesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SpacesServiceClient.DEFAULT_MTLS_ENDPOINT + + conference_record_path = staticmethod(SpacesServiceClient.conference_record_path) + parse_conference_record_path = staticmethod( + SpacesServiceClient.parse_conference_record_path + ) + space_path = staticmethod(SpacesServiceClient.space_path) + parse_space_path = staticmethod(SpacesServiceClient.parse_space_path) + common_billing_account_path = staticmethod( + SpacesServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SpacesServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SpacesServiceClient.common_folder_path) + parse_common_folder_path 
= staticmethod( + SpacesServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SpacesServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SpacesServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SpacesServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SpacesServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SpacesServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SpacesServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpacesServiceAsyncClient: The constructed client. + """ + return SpacesServiceClient.from_service_account_info.__func__(SpacesServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpacesServiceAsyncClient: The constructed client. + """ + return SpacesServiceClient.from_service_account_file.__func__(SpacesServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SpacesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SpacesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SpacesServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(SpacesServiceClient).get_transport_class, type(SpacesServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SpacesServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the spaces service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SpacesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SpacesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_space( + self, + request: Optional[Union[service.CreateSpaceRequest, dict]] = None, + *, + space: Optional[resource.Space] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""`Developer + Preview `__. + Creates a space. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_create_space(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.CreateSpaceRequest( + ) + + # Make the request + response = await client.create_space(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.CreateSpaceRequest, dict]]): + The request object. Request to create a space. + space (:class:`google.apps.meet_v2beta.types.Space`): + Space to be created. As of May 2023, + the input space can be empty. Later on + the input space can be non-empty when + space configuration is introduced. + + This corresponds to the ``space`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Space: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Virtual place where conferences are held. Only one + active conference can be held in one space at any + given time. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([space]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateSpaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if space is not None: + request.space = space + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_space, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_space( + self, + request: Optional[Union[service.GetSpaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""`Developer + Preview `__. + Gets a space by ``space_id`` or ``meeting_code``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_space(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetSpaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_space(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetSpaceRequest, dict]]): + The request object. Request to get a space. + name (:class:`str`): + Required. Resource name of the space. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Space: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Virtual place where conferences are held. Only one + active conference can be held in one space at any + given time. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetSpaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_space, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_space( + self, + request: Optional[Union[service.UpdateSpaceRequest, dict]] = None, + *, + space: Optional[resource.Space] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""`Developer + Preview `__. + Updates a space. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_update_space(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.UpdateSpaceRequest( + ) + + # Make the request + response = await client.update_space(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.UpdateSpaceRequest, dict]]): + The request object. Request to update a space. + space (:class:`google.apps.meet_v2beta.types.Space`): + Required. Space to be updated. + This corresponds to the ``space`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask used to specify the fields to be + updated in the space. If update_mask isn't provided, it + defaults to '*' and updates all fields provided in the + request, including deleting fields not set in the + request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Space: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Virtual place where conferences are held. Only one + active conference can be held in one space at any + given time. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([space, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateSpaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if space is not None: + request.space = space + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_space, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("space.name", request.space.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def end_active_conference( + self, + request: Optional[Union[service.EndActiveConferenceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""`Developer + Preview `__. + Ends an active conference (if there is one). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_end_active_conference(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.EndActiveConferenceRequest( + name="name_value", + ) + + # Make the request + await client.end_active_conference(request=request) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.EndActiveConferenceRequest, dict]]): + The request object. Request to end an ongoing conference + of a space. + name (:class:`str`): + Required. Resource name of the space. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.EndActiveConferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.end_active_conference, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "SpacesServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SpacesServiceAsyncClient",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py new file mode 100644 index 000000000000..b262f0eae19a --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py @@ -0,0 +1,882 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.apps.meet_v2beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .transports.base import DEFAULT_CLIENT_INFO, SpacesServiceTransport +from .transports.grpc import SpacesServiceGrpcTransport +from .transports.grpc_asyncio import SpacesServiceGrpcAsyncIOTransport +from .transports.rest import SpacesServiceRestTransport + + +class SpacesServiceClientMeta(type): + """Metaclass for the SpacesService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SpacesServiceTransport]] + _transport_registry["grpc"] = SpacesServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SpacesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SpacesServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SpacesServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SpacesServiceClient(metaclass=SpacesServiceClientMeta): + """REST API for services dealing with spaces.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "meet.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpacesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpacesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SpacesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SpacesServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def conference_record_path( + conference_record: str, + ) -> str: + """Returns a fully-qualified conference_record string.""" + return "conferenceRecords/{conference_record}".format( + conference_record=conference_record, + ) + + @staticmethod + def parse_conference_record_path(path: str) -> Dict[str, str]: + """Parses a conference_record path into its component segments.""" + m = re.match(r"^conferenceRecords/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def space_path( + space: str, + ) -> str: + """Returns a fully-qualified space string.""" + return "spaces/{space}".format( + space=space, + ) + + @staticmethod + def parse_space_path(path: str) -> Dict[str, str]: + """Parses a space path into its component segments.""" + m = re.match(r"^spaces/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + 
def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SpacesServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the spaces service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SpacesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SpacesServiceTransport): + # transport is a SpacesServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_space( + self, + request: Optional[Union[service.CreateSpaceRequest, dict]] = None, + *, + space: Optional[resource.Space] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""`Developer + Preview `__. + Creates a space. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_create_space(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.CreateSpaceRequest( + ) + + # Make the request + response = client.create_space(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.CreateSpaceRequest, dict]): + The request object. Request to create a space. 
+ space (google.apps.meet_v2beta.types.Space): + Space to be created. As of May 2023, + the input space can be empty. Later on + the input space can be non-empty when + space configuration is introduced. + + This corresponds to the ``space`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Space: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Virtual place where conferences are held. Only one + active conference can be held in one space at any + given time. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([space]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateSpaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateSpaceRequest): + request = service.CreateSpaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if space is not None: + request.space = space + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_space] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_space( + self, + request: Optional[Union[service.GetSpaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""`Developer + Preview `__. + Gets a space by ``space_id`` or ``meeting_code``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_space(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetSpaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_space(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetSpaceRequest, dict]): + The request object. Request to get a space. + name (str): + Required. Resource name of the space. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Space: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Virtual place where conferences are held. Only one + active conference can be held in one space at any + given time. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetSpaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetSpaceRequest): + request = service.GetSpaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_space] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_space( + self, + request: Optional[Union[service.UpdateSpaceRequest, dict]] = None, + *, + space: Optional[resource.Space] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""`Developer + Preview `__. + Updates a space. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_update_space(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.UpdateSpaceRequest( + ) + + # Make the request + response = client.update_space(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.UpdateSpaceRequest, dict]): + The request object. Request to update a space. + space (google.apps.meet_v2beta.types.Space): + Required. Space to be updated. + This corresponds to the ``space`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask used to specify the fields to be + updated in the space. If update_mask isn't provided, it + defaults to '*' and updates all fields provided in the + request, including deleting fields not set in the + request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.meet_v2beta.types.Space: + [Developer Preview](\ https://developers.google.com/workspace/preview). + Virtual place where conferences are held. Only one + active conference can be held in one space at any + given time. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([space, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateSpaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateSpaceRequest): + request = service.UpdateSpaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if space is not None: + request.space = space + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_space] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("space.name", request.space.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def end_active_conference( + self, + request: Optional[Union[service.EndActiveConferenceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""`Developer + Preview `__. + Ends an active conference (if there is one). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_end_active_conference(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.EndActiveConferenceRequest( + name="name_value", + ) + + # Make the request + client.end_active_conference(request=request) + + Args: + request (Union[google.apps.meet_v2beta.types.EndActiveConferenceRequest, dict]): + The request object. Request to end an ongoing conference + of a space. + name (str): + Required. Resource name of the space. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.EndActiveConferenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service.EndActiveConferenceRequest): + request = service.EndActiveConferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.end_active_conference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "SpacesServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SpacesServiceClient",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/__init__.py new file mode 100644 index 000000000000..fb7d7c132f24 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SpacesServiceTransport +from .grpc import SpacesServiceGrpcTransport +from .grpc_asyncio import SpacesServiceGrpcAsyncIOTransport +from .rest import SpacesServiceRestInterceptor, SpacesServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SpacesServiceTransport]] +_transport_registry["grpc"] = SpacesServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SpacesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SpacesServiceRestTransport + +__all__ = ( + "SpacesServiceTransport", + "SpacesServiceGrpcTransport", + "SpacesServiceGrpcAsyncIOTransport", + "SpacesServiceRestTransport", + "SpacesServiceRestInterceptor", +) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py new file mode 100644 index 000000000000..76e05b757e11 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.apps.meet_v2beta import gapic_version as package_version +from google.apps.meet_v2beta.types import resource, service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SpacesServiceTransport(abc.ABC): + """Abstract transport class for SpacesService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "meet.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_space: gapic_v1.method.wrap_method( + self.create_space, + default_timeout=60.0, + client_info=client_info, + ), + self.get_space: gapic_v1.method.wrap_method( + self.get_space, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_space: gapic_v1.method.wrap_method( + self.update_space, + default_timeout=60.0, + client_info=client_info, + ), + self.end_active_conference: gapic_v1.method.wrap_method( + self.end_active_conference, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_space( + self, + ) -> Callable[ + [service.CreateSpaceRequest], Union[resource.Space, Awaitable[resource.Space]] + ]: + raise NotImplementedError() + + @property + def get_space( + self, + ) -> Callable[ + [service.GetSpaceRequest], Union[resource.Space, Awaitable[resource.Space]] + ]: + raise NotImplementedError() + + @property + def update_space( + self, + ) -> Callable[ + [service.UpdateSpaceRequest], Union[resource.Space, Awaitable[resource.Space]] + ]: + raise NotImplementedError() + + @property + def end_active_conference( + self, + ) -> Callable[ + [service.EndActiveConferenceRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SpacesServiceTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py new file mode 100644 index 000000000000..31b533a4c005 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py @@ -0,0 +1,346 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .base import DEFAULT_CLIENT_INFO, SpacesServiceTransport + + +class SpacesServiceGrpcTransport(SpacesServiceTransport): + """gRPC backend transport for SpacesService. + + REST API for services dealing with spaces. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_space(self) -> Callable[[service.CreateSpaceRequest], resource.Space]: + r"""Return a callable for the create space method over gRPC. + + `Developer + Preview `__. + Creates a space. + + Returns: + Callable[[~.CreateSpaceRequest], + ~.Space]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_space" not in self._stubs: + self._stubs["create_space"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/CreateSpace", + request_serializer=service.CreateSpaceRequest.serialize, + response_deserializer=resource.Space.deserialize, + ) + return self._stubs["create_space"] + + @property + def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: + r"""Return a callable for the get space method over gRPC. + + `Developer + Preview `__. + Gets a space by ``space_id`` or ``meeting_code``. + + Returns: + Callable[[~.GetSpaceRequest], + ~.Space]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_space" not in self._stubs: + self._stubs["get_space"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/GetSpace", + request_serializer=service.GetSpaceRequest.serialize, + response_deserializer=resource.Space.deserialize, + ) + return self._stubs["get_space"] + + @property + def update_space(self) -> Callable[[service.UpdateSpaceRequest], resource.Space]: + r"""Return a callable for the update space method over gRPC. + + `Developer + Preview `__. + Updates a space. + + Returns: + Callable[[~.UpdateSpaceRequest], + ~.Space]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_space" not in self._stubs: + self._stubs["update_space"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/UpdateSpace", + request_serializer=service.UpdateSpaceRequest.serialize, + response_deserializer=resource.Space.deserialize, + ) + return self._stubs["update_space"] + + @property + def end_active_conference( + self, + ) -> Callable[[service.EndActiveConferenceRequest], empty_pb2.Empty]: + r"""Return a callable for the end active conference method over gRPC. + + `Developer + Preview `__. + Ends an active conference (if there is one). + + Returns: + Callable[[~.EndActiveConferenceRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "end_active_conference" not in self._stubs: + self._stubs["end_active_conference"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/EndActiveConference", + request_serializer=service.EndActiveConferenceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["end_active_conference"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SpacesServiceGrpcTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a38f135f8012 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py @@ -0,0 +1,351 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .base import DEFAULT_CLIENT_INFO, SpacesServiceTransport +from .grpc import SpacesServiceGrpcTransport + + +class SpacesServiceGrpcAsyncIOTransport(SpacesServiceTransport): + """gRPC AsyncIO backend transport for SpacesService. + + REST API for services dealing with spaces. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_space( + self, + ) -> Callable[[service.CreateSpaceRequest], Awaitable[resource.Space]]: + r"""Return a callable for the create space method over gRPC. + + `Developer + Preview `__. + Creates a space. 
+ + Returns: + Callable[[~.CreateSpaceRequest], + Awaitable[~.Space]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_space" not in self._stubs: + self._stubs["create_space"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/CreateSpace", + request_serializer=service.CreateSpaceRequest.serialize, + response_deserializer=resource.Space.deserialize, + ) + return self._stubs["create_space"] + + @property + def get_space( + self, + ) -> Callable[[service.GetSpaceRequest], Awaitable[resource.Space]]: + r"""Return a callable for the get space method over gRPC. + + `Developer + Preview `__. + Gets a space by ``space_id`` or ``meeting_code``. + + Returns: + Callable[[~.GetSpaceRequest], + Awaitable[~.Space]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_space" not in self._stubs: + self._stubs["get_space"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/GetSpace", + request_serializer=service.GetSpaceRequest.serialize, + response_deserializer=resource.Space.deserialize, + ) + return self._stubs["get_space"] + + @property + def update_space( + self, + ) -> Callable[[service.UpdateSpaceRequest], Awaitable[resource.Space]]: + r"""Return a callable for the update space method over gRPC. + + `Developer + Preview `__. + Updates a space. + + Returns: + Callable[[~.UpdateSpaceRequest], + Awaitable[~.Space]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_space" not in self._stubs: + self._stubs["update_space"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/UpdateSpace", + request_serializer=service.UpdateSpaceRequest.serialize, + response_deserializer=resource.Space.deserialize, + ) + return self._stubs["update_space"] + + @property + def end_active_conference( + self, + ) -> Callable[[service.EndActiveConferenceRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the end active conference method over gRPC. + + `Developer + Preview `__. + Ends an active conference (if there is one). + + Returns: + Callable[[~.EndActiveConferenceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "end_active_conference" not in self._stubs: + self._stubs["end_active_conference"] = self.grpc_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/EndActiveConference", + request_serializer=service.EndActiveConferenceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["end_active_conference"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("SpacesServiceGrpcAsyncIOTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py new file mode 100644 index 000000000000..251086436b8e --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py @@ -0,0 +1,668 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.apps.meet_v2beta.types import resource, service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SpacesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SpacesServiceRestInterceptor: + """Interceptor for SpacesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SpacesServiceRestTransport. + + .. 
code-block:: python + class MyCustomSpacesServiceInterceptor(SpacesServiceRestInterceptor): + def pre_create_space(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_space(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_end_active_conference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_space(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_space(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_space(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_space(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SpacesServiceRestTransport(interceptor=MyCustomSpacesServiceInterceptor()) + client = SpacesServiceClient(transport=transport) + + + """ + + def pre_create_space( + self, request: service.CreateSpaceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.CreateSpaceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_space + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def post_create_space(self, response: resource.Space) -> resource.Space: + """Post-rpc interceptor for create_space + + Override in a subclass to manipulate the response + after it is returned by the SpacesService server but before + it is returned to user code. 
+ """ + return response + + def pre_end_active_conference( + self, + request: service.EndActiveConferenceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.EndActiveConferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for end_active_conference + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def pre_get_space( + self, request: service.GetSpaceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetSpaceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_space + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def post_get_space(self, response: resource.Space) -> resource.Space: + """Post-rpc interceptor for get_space + + Override in a subclass to manipulate the response + after it is returned by the SpacesService server but before + it is returned to user code. + """ + return response + + def pre_update_space( + self, request: service.UpdateSpaceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.UpdateSpaceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_space + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def post_update_space(self, response: resource.Space) -> resource.Space: + """Post-rpc interceptor for update_space + + Override in a subclass to manipulate the response + after it is returned by the SpacesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SpacesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SpacesServiceRestInterceptor + + +class SpacesServiceRestTransport(SpacesServiceTransport): + """REST backend transport for SpacesService. 
+ + REST API for services dealing with spaces. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "meet.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SpacesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SpacesServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateSpace(SpacesServiceRestStub): + def __hash__(self): + return hash("CreateSpace") + + def __call__( + self, + request: service.CreateSpaceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""Call the create space method over HTTP. + + Args: + request (~.service.CreateSpaceRequest): + The request object. Request to create a space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.Space: + `Developer + Preview `__. + Virtual place where conferences are held. Only one + active conference can be held in one space at any given + time. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta/spaces", + "body": "space", + }, + ] + request, metadata = self._interceptor.pre_create_space(request, metadata) + pb_request = service.CreateSpaceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Space() + pb_resp = resource.Space.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_space(resp) + return resp + + class _EndActiveConference(SpacesServiceRestStub): + def __hash__(self): + return hash("EndActiveConference") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.EndActiveConferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the end active conference method over HTTP. + + Args: + request (~.service.EndActiveConferenceRequest): + The request object. Request to end an ongoing conference + of a space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta/{name=spaces/*}:endActiveConference", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_end_active_conference( + request, metadata + ) + pb_request = service.EndActiveConferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetSpace(SpacesServiceRestStub): + def __hash__(self): + return hash("GetSpace") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetSpaceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""Call the get space method over HTTP. + + Args: + request (~.service.GetSpaceRequest): + The request object. Request to get a space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.Space: + `Developer + Preview `__. + Virtual place where conferences are held. Only one + active conference can be held in one space at any given + time. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=spaces/*}", + }, + ] + request, metadata = self._interceptor.pre_get_space(request, metadata) + pb_request = service.GetSpaceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Space() + pb_resp = resource.Space.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_space(resp) + return resp + + class _UpdateSpace(SpacesServiceRestStub): + def __hash__(self): + return hash("UpdateSpace") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateSpaceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resource.Space: + r"""Call the update space method over HTTP. + + Args: + request (~.service.UpdateSpaceRequest): + The request object. Request to update a space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resource.Space: + `Developer + Preview `__. + Virtual place where conferences are held. Only one + active conference can be held in one space at any given + time. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2beta/{space.name=spaces/*}", + "body": "space", + }, + ] + request, metadata = self._interceptor.pre_update_space(request, metadata) + pb_request = service.UpdateSpaceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Space() + pb_resp = resource.Space.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_space(resp) + return resp + + @property + def create_space(self) -> Callable[[service.CreateSpaceRequest], resource.Space]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateSpace(self._session, self._host, self._interceptor) # type: ignore + + @property + def end_active_conference( + self, + ) -> Callable[[service.EndActiveConferenceRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EndActiveConference(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSpace(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_space(self) -> Callable[[service.UpdateSpaceRequest], resource.Space]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSpace(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SpacesServiceRestTransport",) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py new file mode 100644 index 000000000000..2c25509ad9bb --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resource import ( + ActiveConference, + AnonymousUser, + ConferenceRecord, + DocsDestination, + DriveDestination, + Participant, + ParticipantSession, + PhoneUser, + Recording, + SignedinUser, + Space, + SpaceConfig, + Transcript, + TranscriptEntry, +) +from .service import ( + CreateSpaceRequest, + EndActiveConferenceRequest, + GetConferenceRecordRequest, + GetParticipantRequest, + GetParticipantSessionRequest, + GetRecordingRequest, + GetSpaceRequest, + GetTranscriptEntryRequest, + GetTranscriptRequest, + ListConferenceRecordsRequest, + ListConferenceRecordsResponse, + ListParticipantSessionsRequest, + ListParticipantSessionsResponse, + ListParticipantsRequest, + ListParticipantsResponse, + ListRecordingsRequest, + ListRecordingsResponse, + ListTranscriptEntriesRequest, + ListTranscriptEntriesResponse, + ListTranscriptsRequest, + ListTranscriptsResponse, + UpdateSpaceRequest, +) + +__all__ = ( + "ActiveConference", + "AnonymousUser", + "ConferenceRecord", + "DocsDestination", + "DriveDestination", + "Participant", + "ParticipantSession", + "PhoneUser", + "Recording", + "SignedinUser", + "Space", + "SpaceConfig", + "Transcript", + "TranscriptEntry", + "CreateSpaceRequest", + "EndActiveConferenceRequest", + "GetConferenceRecordRequest", + "GetParticipantRequest", + "GetParticipantSessionRequest", + "GetRecordingRequest", + "GetSpaceRequest", + "GetTranscriptEntryRequest", + "GetTranscriptRequest", + "ListConferenceRecordsRequest", + "ListConferenceRecordsResponse", + "ListParticipantSessionsRequest", + "ListParticipantSessionsResponse", + 
"ListParticipantsRequest", + "ListParticipantsResponse", + "ListRecordingsRequest", + "ListRecordingsResponse", + "ListTranscriptEntriesRequest", + "ListTranscriptEntriesResponse", + "ListTranscriptsRequest", + "ListTranscriptsResponse", + "UpdateSpaceRequest", +) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py b/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py new file mode 100644 index 000000000000..e4485db03874 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py @@ -0,0 +1,669 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.apps.meet.v2beta", + manifest={ + "Space", + "ActiveConference", + "SpaceConfig", + "ConferenceRecord", + "Participant", + "ParticipantSession", + "SignedinUser", + "AnonymousUser", + "PhoneUser", + "Recording", + "DriveDestination", + "Transcript", + "DocsDestination", + "TranscriptEntry", + }, +) + + +class Space(proto.Message): + r"""`Developer + Preview `__. + Virtual place where conferences are held. Only one active conference + can be held in one space at any given time. + + Attributes: + name (str): + Immutable. Resource name of the space. 
Format: + ``spaces/{space}`` + meeting_uri (str): + Output only. URI used to join meeting, such as + ``https://meet.google.com/abc-mnop-xyz``. + meeting_code (str): + Output only. Type friendly code to join the meeting. Format: + ``[a-z]+-[a-z]+-[a-z]+`` such as ``abc-mnop-xyz``. The + maximum length is 128 characters. Can ONLY be used as alias + of the space ID to get the space. + config (google.apps.meet_v2beta.types.SpaceConfig): + Configuration pertaining to the meeting + space. + active_conference (google.apps.meet_v2beta.types.ActiveConference): + Active conference if it exists. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + meeting_uri: str = proto.Field( + proto.STRING, + number=2, + ) + meeting_code: str = proto.Field( + proto.STRING, + number=3, + ) + config: "SpaceConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="SpaceConfig", + ) + active_conference: "ActiveConference" = proto.Field( + proto.MESSAGE, + number=6, + message="ActiveConference", + ) + + +class ActiveConference(proto.Message): + r"""Active conference. + + Attributes: + conference_record (str): + Output only. Reference to 'ConferenceRecord' resource. + Format: ``conferenceRecords/{conference_record}`` where + ``{conference_record}`` is a unique id for each instance of + a call within a space. + """ + + conference_record: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SpaceConfig(proto.Message): + r"""The configuration pertaining to a meeting space. + + Attributes: + access_type (google.apps.meet_v2beta.types.SpaceConfig.AccessType): + Access type of the meeting space that + determines who can join without knocking. + Default: The user's default access settings. + Controlled by the user's admin for enterprise + users or RESTRICTED. + entry_point_access (google.apps.meet_v2beta.types.SpaceConfig.EntryPointAccess): + Defines the entry points that can be used to + join meetings hosted in this meeting space. 
+ Default: EntryPointAccess.ALL + """ + + class AccessType(proto.Enum): + r"""Possible access types for a meeting space. + + Values: + ACCESS_TYPE_UNSPECIFIED (0): + Default value specified by the user's + organization. Note: This is never returned, as + the configured access type is returned instead. + OPEN (1): + Anyone with the join information (for + example, the URL or phone access information) + can join without knocking. + TRUSTED (2): + Members of the host's organization, invited + external users, and dial-in users can join + without knocking. Everyone else must knock. + RESTRICTED (3): + Only invitees can join without knocking. + Everyone else must knock. + """ + ACCESS_TYPE_UNSPECIFIED = 0 + OPEN = 1 + TRUSTED = 2 + RESTRICTED = 3 + + class EntryPointAccess(proto.Enum): + r"""Entry points that can be used to join a meeting. Example: + ``meet.google.com``, the Embed SDK Web, or a mobile application. + + Values: + ENTRY_POINT_ACCESS_UNSPECIFIED (0): + Unused. + ALL (1): + All entry points are allowed. + CREATOR_APP_ONLY (2): + Only entry points owned by the Google Cloud + project that created the space can be used to + join meetings in this space. Apps can use the + Embed SDK Web or mobile Meet SDKs to create + owned entry points. + """ + ENTRY_POINT_ACCESS_UNSPECIFIED = 0 + ALL = 1 + CREATOR_APP_ONLY = 2 + + access_type: AccessType = proto.Field( + proto.ENUM, + number=1, + enum=AccessType, + ) + entry_point_access: EntryPointAccess = proto.Field( + proto.ENUM, + number=2, + enum=EntryPointAccess, + ) + + +class ConferenceRecord(proto.Message): + r"""`Developer + Preview `__. Single + instance of a meeting held in a space. + + Attributes: + name (str): + Identifier. Resource name of the conference record. Format: + ``conferenceRecords/{conference_record}`` where + ``{conference_record}`` is a unique id for each instance of + a call within a space. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
Timestamp when the conference + started, always set. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the conference + ended. Set for past conferences. Unset if the + conference is ongoing. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server enforced expire time for + when this conference record resource is deleted. + The resource is deleted 30 days after the + conference ends. + space (str): + Output only. The space where the conference + was held. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + space: str = proto.Field( + proto.STRING, + number=5, + ) + + +class Participant(proto.Message): + r"""`Developer + Preview `__. User + who attended or is attending a conference. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + signedin_user (google.apps.meet_v2beta.types.SignedinUser): + Signed-in user. + + This field is a member of `oneof`_ ``user``. + anonymous_user (google.apps.meet_v2beta.types.AnonymousUser): + Anonymous user. + + This field is a member of `oneof`_ ``user``. + phone_user (google.apps.meet_v2beta.types.PhoneUser): + User who calls in from their phone. + + This field is a member of `oneof`_ ``user``. + name (str): + Output only. Resource name of the participant. 
Format: + ``conferenceRecords/{conference_record}/participants/{participant}`` + earliest_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the participant joined + the meeting for the first time. + latest_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the participant left + the meeting for the last time. This can be null + if it is an active meeting. + """ + + signedin_user: "SignedinUser" = proto.Field( + proto.MESSAGE, + number=4, + oneof="user", + message="SignedinUser", + ) + anonymous_user: "AnonymousUser" = proto.Field( + proto.MESSAGE, + number=5, + oneof="user", + message="AnonymousUser", + ) + phone_user: "PhoneUser" = proto.Field( + proto.MESSAGE, + number=6, + oneof="user", + message="PhoneUser", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + earliest_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + latest_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + + +class ParticipantSession(proto.Message): + r"""`Developer + Preview <https://developers.google.com/workspace/preview>`__. Refers + to each unique join/leave session when a user joins a conference + from a device. Note that any time a user joins the conference a new + unique ID is assigned. That means if a user joins a space multiple + times from the same device, they're assigned different IDs, and are + also treated as different participant sessions. + + Attributes: + name (str): + Identifier. Session id. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the user session + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the user session + ended. Unset if the user session hasn’t ended. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class SignedinUser(proto.Message): + r"""A signed-in user can be: + + a) An individual joining from a personal computer, mobile + device, or through companion mode. + b) A robot account used by conference room devices. + + Attributes: + user (str): + Output only. Unique ID for the user. Interoperable with + Admin SDK API and People API. Format: ``users/{user}`` + display_name (str): + Output only. For a personal device, it's the + user's first and last name. For a robot account, + it's the admin specified device name. For + example, "Altostrat Room". + """ + + user: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnonymousUser(proto.Message): + r"""User who joins anonymously (meaning not signed into a Google + Account). + + Attributes: + display_name (str): + Output only. User provided name when they + join a conference anonymously. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PhoneUser(proto.Message): + r"""User dialing in from a phone where the user's identity is + unknown because they haven't signed in with a Google Account. + + Attributes: + display_name (str): + Output only. Partially redacted user's phone + number when they call in. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Recording(proto.Message): + r"""`Developer + Preview `__. + Metadata about a recording created during a conference. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + drive_destination (google.apps.meet_v2beta.types.DriveDestination): + Output only. Recording is saved to Google Drive as an mp4 + file. The ``drive_destination`` includes the Drive + ``fileId`` that can be used to download the file using the + ``files.get`` method of the Drive API. + + This field is a member of `oneof`_ ``destination``. + name (str): + Output only. Resource name of the recording. Format: + ``conferenceRecords/{conference_record}/recordings/{recording}`` + where ``{recording}`` is a 1:1 mapping to each unique + recording session during the conference. + state (google.apps.meet_v2beta.types.Recording.State): + Output only. Current state. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the recording + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the recording + ended. + """ + + class State(proto.Enum): + r"""Current state of the recording session. + + Values: + STATE_UNSPECIFIED (0): + Default, never used. + STARTED (1): + An active recording session has started. + ENDED (2): + This recording session has ended, but the + recording file hasn't been generated yet. + FILE_GENERATED (3): + Recording file is generated and ready to + download. 
+ """ + STATE_UNSPECIFIED = 0 + STARTED = 1 + ENDED = 2 + FILE_GENERATED = 3 + + drive_destination: "DriveDestination" = proto.Field( + proto.MESSAGE, + number=6, + oneof="destination", + message="DriveDestination", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class DriveDestination(proto.Message): + r"""Export location where a recording file is saved in Google + Drive. + + Attributes: + file (str): + Output only. The ``fileId`` for the underlying MP4 file. For + example, "1kuceFZohVoCh6FulBHxwy6I15Ogpc4hP". Use + ``$ GET https://www.googleapis.com/drive/v3/files/{$fileId}?alt=media`` + to download the blob. For more information, see + https://developers.google.com/drive/api/v3/reference/files/get. + export_uri (str): + Output only. Link used to play back the recording file in + the browser. For example, + ``https://drive.google.com/file/d/{$fileId}/view``. + """ + + file: str = proto.Field( + proto.STRING, + number=1, + ) + export_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Transcript(proto.Message): + r"""`Developer + Preview `__. + Metadata for a transcript generated from a conference. It refers to + the ASR (Automatic Speech Recognition) result of user's speech + during the conference. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + docs_destination (google.apps.meet_v2beta.types.DocsDestination): + Output only. Where the Google Docs transcript + is saved. + + This field is a member of `oneof`_ ``destination``. + name (str): + Output only. Resource name of the transcript. 
Format: + ``conferenceRecords/{conference_record}/transcripts/{transcript}``, + where ``{transcript}`` is a 1:1 mapping to each unique + transcription session of the conference. + state (google.apps.meet_v2beta.types.Transcript.State): + Output only. Current state. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the transcript + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the transcript + stopped. + """ + + class State(proto.Enum): + r"""Current state of the transcript session. + + Values: + STATE_UNSPECIFIED (0): + Default, never used. + STARTED (1): + An active transcript session has started. + ENDED (2): + This transcript session has ended, but the + transcript file hasn't been generated yet. + FILE_GENERATED (3): + Transcript file is generated and ready to + download. + """ + STATE_UNSPECIFIED = 0 + STARTED = 1 + ENDED = 2 + FILE_GENERATED = 3 + + docs_destination: "DocsDestination" = proto.Field( + proto.MESSAGE, + number=6, + oneof="destination", + message="DocsDestination", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class DocsDestination(proto.Message): + r"""Google Docs location where the transcript file is saved. + + Attributes: + document (str): + Output only. The document ID for the underlying Google Docs + transcript file. For example, + "1kuceFZohVoCh6FulBHxwy6I15Ogpc4hP". Use the + ``documents.get`` method of the Google Docs API + (https://developers.google.com/docs/api/reference/rest/v1/documents/get) + to fetch the content. + export_uri (str): + Output only. URI for the Google Docs transcript file. 
Use + ``https://docs.google.com/document/d/{$DocumentId}/view`` to + browse the transcript in the browser. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + export_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TranscriptEntry(proto.Message): + r"""`Developer + Preview `__. Single + entry for one user’s speech during a transcript session. + + Attributes: + name (str): + Output only. Resource name of the entry. Format: + "conferenceRecords/{conference_record}/transcripts/{transcript}/entries/{entry}". + participant (str): + Output only. Refer to the participant who + speaks. + text (str): + Output only. The transcribed text of the + participant's voice, at maximum 10K words. Note + that the limit is subject to change. + language_code (str): + Output only. Language of spoken text, such as + "en-US". IETF BCP 47 syntax + (https://tools.ietf.org/html/bcp47) + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the transcript + entry started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the transcript + entry ended. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + participant: str = proto.Field( + proto.STRING, + number=2, + ) + text: str = proto.Field( + proto.STRING, + number=3, + ) + language_code: str = proto.Field( + proto.STRING, + number=4, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py b/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py new file mode 100644 index 000000000000..597f0e5dcfe6 --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py @@ -0,0 +1,615 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.apps.meet_v2beta.types import resource + +__protobuf__ = proto.module( + package="google.apps.meet.v2beta", + manifest={ + "CreateSpaceRequest", + "GetSpaceRequest", + "UpdateSpaceRequest", + "EndActiveConferenceRequest", + "GetConferenceRecordRequest", + "ListConferenceRecordsRequest", + "ListConferenceRecordsResponse", + "GetParticipantRequest", + "ListParticipantsRequest", + "ListParticipantsResponse", + "GetParticipantSessionRequest", + "ListParticipantSessionsRequest", + "ListParticipantSessionsResponse", + "GetRecordingRequest", + "ListRecordingsRequest", + "ListRecordingsResponse", + "GetTranscriptRequest", + "ListTranscriptsRequest", + "ListTranscriptsResponse", + "GetTranscriptEntryRequest", + "ListTranscriptEntriesRequest", + "ListTranscriptEntriesResponse", + }, +) + + +class CreateSpaceRequest(proto.Message): + r"""Request to create a space. + + Attributes: + space (google.apps.meet_v2beta.types.Space): + Space to be created. As of May 2023, the + input space can be empty. Later on the input + space can be non-empty when space configuration + is introduced. + """ + + space: resource.Space = proto.Field( + proto.MESSAGE, + number=1, + message=resource.Space, + ) + + +class GetSpaceRequest(proto.Message): + r"""Request to get a space. + + Attributes: + name (str): + Required. Resource name of the space. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSpaceRequest(proto.Message): + r"""Request to update a space. + + Attributes: + space (google.apps.meet_v2beta.types.Space): + Required. Space to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask used to specify the fields to be + updated in the space. 
If update_mask isn't provided, it + defaults to '*' and updates all fields provided in the + request, including deleting fields not set in the request. + """ + + space: resource.Space = proto.Field( + proto.MESSAGE, + number=1, + message=resource.Space, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class EndActiveConferenceRequest(proto.Message): + r"""Request to end an ongoing conference of a space. + + Attributes: + name (str): + Required. Resource name of the space. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetConferenceRecordRequest(proto.Message): + r"""Request to get a conference record. + + Attributes: + name (str): + Required. Resource name of the conference. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConferenceRecordsRequest(proto.Message): + r"""Request to fetch list of conference records per user. + + Attributes: + page_size (int): + Optional. Maximum number of conference + records to return. The service might return + fewer than this value. If unspecified, at most + 25 conference records are returned. The maximum + value is 100; values above 100 are coerced to + 100. Maximum might change in the future. + page_token (str): + Optional. Page token returned from previous + List Call. + filter (str): + Optional. User specified filtering condition in EBNF format. + The following are the filterable fields: + + - ``space.meeting_code`` + - ``space.name`` + - ``start_time`` + - ``end_time`` + + For example, ``space.meeting_code = "abc-mnop-xyz"``. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListConferenceRecordsResponse(proto.Message): + r"""Response of ListConferenceRecords method. 
+ + Attributes: + conference_records (MutableSequence[google.apps.meet_v2beta.types.ConferenceRecord]): + List of conferences in one page. + next_page_token (str): + Token to be circulated back for further List + call if current List does NOT include all the + Conferences. Unset if all conferences have been + returned. + """ + + @property + def raw_page(self): + return self + + conference_records: MutableSequence[ + resource.ConferenceRecord + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.ConferenceRecord, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetParticipantRequest(proto.Message): + r"""Request to get a Participant. + + Attributes: + name (str): + Required. Resource name of the participant. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListParticipantsRequest(proto.Message): + r"""Request to fetch list of participant per conference. + + Attributes: + parent (str): + Required. Format: ``conferenceRecords/{conference_record}`` + page_size (int): + Maximum number of participants to return. The + service might return fewer than this value. + If unspecified, at most 100 participants are + returned. The maximum value is 250; values above + 250 are coerced to 250. Maximum might change in + the future. + page_token (str): + Page token returned from previous List Call. + filter (str): + Optional. User specified filtering condition in EBNF format. + The following are the filterable fields: + + - ``earliest_start_time`` + - ``latest_end_time`` + + For example, ``latest_end_time IS NULL`` returns active + participants in the conference. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListParticipantsResponse(proto.Message): + r"""Response of ListParticipants method. + + Attributes: + participants (MutableSequence[google.apps.meet_v2beta.types.Participant]): + List of participants in one page. + next_page_token (str): + Token to be circulated back for further List + call if current List doesn't include all the + participants. Unset if all participants are + returned. + total_size (int): + Total, exact number of ``participants``. By default, this + field isn't included in the response. Set the field mask in + `SystemParameterContext `__ + to receive this field in the response. + """ + + @property + def raw_page(self): + return self + + participants: MutableSequence[resource.Participant] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.Participant, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class GetParticipantSessionRequest(proto.Message): + r"""Request to get a participant session. + + Attributes: + name (str): + Required. Resource name of the participant. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListParticipantSessionsRequest(proto.Message): + r"""Request to fetch list of participant sessions per conference + record per participant. + + Attributes: + parent (str): + Required. Format: + ``conferenceRecords/{conference_record}/participants/{participant}`` + page_size (int): + Optional. Maximum number of participant + sessions to return. The service might return + fewer than this value. If unspecified, at most + 100 participants are returned. The maximum value + is 250; values above 250 are coerced to 250. 
+ Maximum might change in the future. + page_token (str): + Optional. Page token returned from previous + List Call. + filter (str): + Optional. User specified filtering condition in EBNF format. + The following are the filterable fields: + + - ``start_time`` + - ``end_time`` + + For example, ``end_time IS NULL`` returns active participant + sessions in the conference record. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListParticipantSessionsResponse(proto.Message): + r"""Response of ListParticipantSessions method. + + Attributes: + participant_sessions (MutableSequence[google.apps.meet_v2beta.types.ParticipantSession]): + List of participant sessions in one page. + next_page_token (str): + Token to be circulated back for further List + call if current List doesn't include all the + participant sessions. Unset if all participant sessions are + returned. + """ + + @property + def raw_page(self): + return self + + participant_sessions: MutableSequence[ + resource.ParticipantSession + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.ParticipantSession, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetRecordingRequest(proto.Message): + r"""Request message for GetRecording method. + + Attributes: + name (str): + Required. Resource name of the recording. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListRecordingsRequest(proto.Message): + r"""Request for ListRecordings method. + + Attributes: + parent (str): + Required. Format: ``conferenceRecords/{conference_record}`` + page_size (int): + Maximum number of recordings to return. The + service might return fewer than this value. + If unspecified, at most 10 recordings are + returned. 
The maximum value is 100; values above + 100 are coerced to 100. Maximum might change in + the future. + page_token (str): + Page token returned from previous List Call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListRecordingsResponse(proto.Message): + r"""Response for ListRecordings method. + + Attributes: + recordings (MutableSequence[google.apps.meet_v2beta.types.Recording]): + List of recordings in one page. + next_page_token (str): + Token to be circulated back for further List + call if current List doesn't include all the + recordings. Unset if all recordings are + returned. + """ + + @property + def raw_page(self): + return self + + recordings: MutableSequence[resource.Recording] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.Recording, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTranscriptRequest(proto.Message): + r"""Request for GetTranscript method. + + Attributes: + name (str): + Required. Resource name of the transcript. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTranscriptsRequest(proto.Message): + r"""Request for ListTranscripts method. + + Attributes: + parent (str): + Required. Format: ``conferenceRecords/{conference_record}`` + page_size (int): + Maximum number of transcripts to return. The + service might return fewer than this value. + If unspecified, at most 10 transcripts are + returned. The maximum value is 100; values above + 100 are coerced to 100. Maximum might change in + the future. + page_token (str): + Page token returned from previous List Call. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTranscriptsResponse(proto.Message): + r"""Response for ListTranscripts method. + + Attributes: + transcripts (MutableSequence[google.apps.meet_v2beta.types.Transcript]): + List of transcripts in one page. + next_page_token (str): + Token to be circulated back for further List + call if current List doesn't include all the + transcripts. Unset if all transcripts are + returned. + """ + + @property + def raw_page(self): + return self + + transcripts: MutableSequence[resource.Transcript] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.Transcript, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTranscriptEntryRequest(proto.Message): + r"""Request for GetTranscriptEntry method. + + Attributes: + name (str): + Required. Resource name of the ``TranscriptEntry``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTranscriptEntriesRequest(proto.Message): + r"""Request for ListTranscriptEntries method. + + Attributes: + parent (str): + Required. Format: + ``conferenceRecords/{conference_record}/transcripts/{transcript}`` + page_size (int): + Maximum number of entries to return. The + service might return fewer than this value. + If unspecified, at most 10 entries are returned. + The maximum value is 100; values above 100 are + coerced to 100. Maximum might change in the + future. + page_token (str): + Page token returned from previous List Call. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTranscriptEntriesResponse(proto.Message): + r"""Response for ListTranscriptEntries method + + Attributes: + transcript_entries (MutableSequence[google.apps.meet_v2beta.types.TranscriptEntry]): + List of TranscriptEntries in one page. + next_page_token (str): + Token to be circulated back for further List + call if current List doesn't include all the + transcript entries. Unset if all entries are + returned. + """ + + @property + def raw_page(self): + return self + + transcript_entries: MutableSequence[resource.TranscriptEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.TranscriptEntry, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-meet/mypy.ini b/packages/google-apps-meet/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-apps-meet/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-apps-meet/noxfile.py b/packages/google-apps-meet/noxfile.py new file mode 100644 index 000000000000..7d3551347c78 --- /dev/null +++ b/packages/google-apps-meet/noxfile.py @@ -0,0 +1,410 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + 
os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_conference_record_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_conference_record_async.py new file mode 100644 index 000000000000..4fccaa070268 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_conference_record_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetConferenceRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetConferenceRecord_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_conference_record(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetConferenceRecordRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conference_record(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetConferenceRecord_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_conference_record_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_conference_record_sync.py new file mode 100644 index 000000000000..cb817f2458fb --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_conference_record_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConferenceRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetConferenceRecord_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_conference_record(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetConferenceRecordRequest( + name="name_value", + ) + + # Make the request + response = client.get_conference_record(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetConferenceRecord_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_async.py new file mode 100644 index 000000000000..fb8ec9fa0bfd --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetParticipant_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_participant(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantRequest( + name="name_value", + ) + + # Make the request + response = await client.get_participant(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetParticipant_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_session_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_session_async.py new file mode 100644 index 000000000000..9cf6b1b59bcf --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_session_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetParticipantSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetParticipantSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_participant_session(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_participant_session(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetParticipantSession_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_session_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_session_sync.py new file mode 100644 index 000000000000..9e74f86a34ed --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_session_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetParticipantSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetParticipantSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_participant_session(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_participant_session(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetParticipantSession_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_sync.py new file mode 100644 index 000000000000..696cf2fe7691 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_participant_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetParticipant_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_participant(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetParticipantRequest( + name="name_value", + ) + + # Make the request + response = client.get_participant(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetParticipant_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_recording_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_recording_async.py new file mode 100644 index 000000000000..2793c766b8ca --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_recording_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRecording +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetRecording_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_recording(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetRecordingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_recording(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetRecording_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_recording_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_recording_sync.py new file mode 100644 index 000000000000..0c835e68f7d4 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_recording_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRecording +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetRecording_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_recording(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetRecordingRequest( + name="name_value", + ) + + # Make the request + response = client.get_recording(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetRecording_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_async.py new file mode 100644 index 000000000000..aa98597fa09a --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTranscript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetTranscript_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_transcript(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transcript(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetTranscript_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_entry_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_entry_async.py new file mode 100644 index 000000000000..b80da75d6787 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTranscriptEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetTranscriptEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_transcript_entry(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transcript_entry(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetTranscriptEntry_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_entry_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_entry_sync.py new file mode 100644 index 000000000000..5f7ebf08f585 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTranscriptEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetTranscriptEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_transcript_entry(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_transcript_entry(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetTranscriptEntry_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_sync.py new file mode 100644 index 000000000000..31a589c58369 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_get_transcript_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTranscript +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_GetTranscript_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_transcript(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetTranscriptRequest( + name="name_value", + ) + + # Make the request + response = client.get_transcript(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_GetTranscript_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_conference_records_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_conference_records_async.py new file mode 100644 index 000000000000..4a1c17b25279 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_conference_records_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConferenceRecords +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListConferenceRecords_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_conference_records(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListConferenceRecordsRequest( + ) + + # Make the request + page_result = client.list_conference_records(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListConferenceRecords_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_conference_records_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_conference_records_sync.py new file mode 100644 index 000000000000..60390f1f158c --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_conference_records_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConferenceRecords +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListConferenceRecords_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_conference_records(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListConferenceRecordsRequest( + ) + + # Make the request + page_result = client.list_conference_records(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListConferenceRecords_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participant_sessions_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participant_sessions_async.py new file mode 100644 index 000000000000..7cfbd2ad6152 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participant_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListParticipantSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListParticipantSessions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_participant_sessions(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participant_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListParticipantSessions_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participant_sessions_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participant_sessions_sync.py new file mode 100644 index 000000000000..3d7a519c4bd7 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participant_sessions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListParticipantSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListParticipantSessions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_participant_sessions(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participant_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListParticipantSessions_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participants_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participants_async.py new file mode 100644 index 000000000000..c3ab2cac72f7 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participants_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListParticipants +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListParticipants_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_participants(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participants(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListParticipants_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participants_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participants_sync.py new file mode 100644 index 000000000000..8dc1c2162d3f --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_participants_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListParticipants +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListParticipants_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_participants(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListParticipantsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participants(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListParticipants_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_recordings_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_recordings_async.py new file mode 100644 index 000000000000..98abc3b31337 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_recordings_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRecordings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListRecordings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_recordings(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListRecordingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recordings(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListRecordings_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_recordings_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_recordings_sync.py new file mode 100644 index 000000000000..9ebf7e1865ef --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_recordings_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRecordings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListRecordings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_recordings(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListRecordingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_recordings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListRecordings_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcript_entries_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcript_entries_async.py new file mode 100644 index 000000000000..18fd6ace6c0e --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcript_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTranscriptEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListTranscriptEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_transcript_entries(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcript_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListTranscriptEntries_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcript_entries_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcript_entries_sync.py new file mode 100644 index 000000000000..6695bc61d565 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcript_entries_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTranscriptEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListTranscriptEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_transcript_entries(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcript_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListTranscriptEntries_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcripts_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcripts_async.py new file mode 100644 index 000000000000..87d34aedfe03 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcripts_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTranscripts +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListTranscripts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_transcripts(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcripts(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListTranscripts_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcripts_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcripts_sync.py new file mode 100644 index 000000000000..cff2819c2311 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_conference_records_service_list_transcripts_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTranscripts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_ConferenceRecordsService_ListTranscripts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_transcripts(): + # Create a client + client = meet_v2beta.ConferenceRecordsServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListTranscriptsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transcripts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_ConferenceRecordsService_ListTranscripts_sync] diff --git a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_space_async.py similarity index 75% rename from packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_sync.py rename to packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_space_async.py index c6e104349b38..cff28d9b2765 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_sync.py +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_space_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for BufferTask +# Snippet for CreateSpace # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-tasks +# python3 -m pip install google-apps-meet -# [START cloudtasks_v2beta2_generated_CloudTasks_BufferTask_sync] +# [START meet_v2beta_generated_SpacesService_CreateSpace_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,21 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import tasks_v2beta2 +from google.apps import meet_v2beta -def sample_buffer_task(): +async def sample_create_space(): # Create a client - client = tasks_v2beta2.CloudTasksClient() + client = meet_v2beta.SpacesServiceAsyncClient() # Initialize request argument(s) - request = tasks_v2beta2.BufferTaskRequest( - queue="queue_value", + request = meet_v2beta.CreateSpaceRequest( ) # Make the request - response = client.buffer_task(request=request) + response = await client.create_space(request=request) # Handle the response print(response) -# [END cloudtasks_v2beta2_generated_CloudTasks_BufferTask_sync] +# [END meet_v2beta_generated_SpacesService_CreateSpace_async] diff --git a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_space_sync.py similarity index 75% rename from packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_sync.py rename to packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_space_sync.py index 246b167a25a5..bc898f509d63 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_sync.py +++ 
b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_space_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for BufferTask +# Snippet for CreateSpace # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-tasks +# python3 -m pip install google-apps-meet -# [START cloudtasks_v2beta3_generated_CloudTasks_BufferTask_sync] +# [START meet_v2beta_generated_SpacesService_CreateSpace_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,21 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import tasks_v2beta3 +from google.apps import meet_v2beta -def sample_buffer_task(): +def sample_create_space(): # Create a client - client = tasks_v2beta3.CloudTasksClient() + client = meet_v2beta.SpacesServiceClient() # Initialize request argument(s) - request = tasks_v2beta3.BufferTaskRequest( - queue="queue_value", + request = meet_v2beta.CreateSpaceRequest( ) # Make the request - response = client.buffer_task(request=request) + response = client.create_space(request=request) # Handle the response print(response) -# [END cloudtasks_v2beta3_generated_CloudTasks_BufferTask_sync] +# [END meet_v2beta_generated_SpacesService_CreateSpace_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_end_active_conference_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_end_active_conference_async.py new file mode 100644 index 000000000000..bf678fa8854d --- /dev/null +++ 
b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_end_active_conference_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EndActiveConference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_EndActiveConference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_end_active_conference(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.EndActiveConferenceRequest( + name="name_value", + ) + + # Make the request + await client.end_active_conference(request=request) + + +# [END meet_v2beta_generated_SpacesService_EndActiveConference_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_end_active_conference_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_end_active_conference_sync.py new file mode 100644 index 000000000000..cfb07ebc571f --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_end_active_conference_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EndActiveConference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_EndActiveConference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_end_active_conference(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.EndActiveConferenceRequest( + name="name_value", + ) + + # Make the request + client.end_active_conference(request=request) + + +# [END meet_v2beta_generated_SpacesService_EndActiveConference_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_space_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_space_async.py new file mode 100644 index 000000000000..4bf3d9367517 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_space_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSpace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_GetSpace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_space(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetSpaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_space(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_GetSpace_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_space_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_space_sync.py new file mode 100644 index 000000000000..183d8644ed1d --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_space_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSpace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_GetSpace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_space(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetSpaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_space(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_GetSpace_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_update_space_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_update_space_async.py new file mode 100644 index 000000000000..da7ac92ed24c --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_update_space_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSpace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_UpdateSpace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_update_space(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.UpdateSpaceRequest( + ) + + # Make the request + response = await client.update_space(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_UpdateSpace_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_update_space_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_update_space_sync.py new file mode 100644 index 000000000000..7a85ea790c3a --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_update_space_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSpace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_UpdateSpace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_update_space(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.UpdateSpaceRequest( + ) + + # Make the request + response = client.update_space(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_UpdateSpace_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json new file mode 100644 index 000000000000..235026b5e66c --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json @@ -0,0 +1,2585 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.apps.meet.v2beta", + "version": "v2beta" + } + ], + "language": "PYTHON", + "name": "google-apps-meet", + "version": "0.1.1" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.get_conference_record", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetConferenceRecord", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetConferenceRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetConferenceRecordRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.ConferenceRecord", + "shortName": "get_conference_record" + }, + "description": "Sample for GetConferenceRecord", + "file": "meet_v2beta_generated_conference_records_service_get_conference_record_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetConferenceRecord_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_conference_record_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": 
"google.apps.meet_v2beta.ConferenceRecordsServiceClient.get_conference_record", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetConferenceRecord", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetConferenceRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetConferenceRecordRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.ConferenceRecord", + "shortName": "get_conference_record" + }, + "description": "Sample for GetConferenceRecord", + "file": "meet_v2beta_generated_conference_records_service_get_conference_record_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetConferenceRecord_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_conference_record_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.get_participant_session", + "method": { + "fullName": 
"google.apps.meet.v2beta.ConferenceRecordsService.GetParticipantSession", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetParticipantSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetParticipantSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.ParticipantSession", + "shortName": "get_participant_session" + }, + "description": "Sample for GetParticipantSession", + "file": "meet_v2beta_generated_conference_records_service_get_participant_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetParticipantSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_participant_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.get_participant_session", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetParticipantSession", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": 
"ConferenceRecordsService" + }, + "shortName": "GetParticipantSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetParticipantSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.ParticipantSession", + "shortName": "get_participant_session" + }, + "description": "Sample for GetParticipantSession", + "file": "meet_v2beta_generated_conference_records_service_get_participant_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetParticipantSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_participant_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.get_participant", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetParticipant", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetParticipant" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.apps.meet_v2beta.types.GetParticipantRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Participant", + "shortName": "get_participant" + }, + "description": "Sample for GetParticipant", + "file": "meet_v2beta_generated_conference_records_service_get_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetParticipant_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.get_participant", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetParticipant", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetParticipantRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Participant", + "shortName": "get_participant" + }, + "description": "Sample for GetParticipant", + "file": "meet_v2beta_generated_conference_records_service_get_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetParticipant_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.get_recording", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetRecording", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetRecording" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetRecordingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Recording", + "shortName": "get_recording" + }, + "description": "Sample for GetRecording", + "file": 
"meet_v2beta_generated_conference_records_service_get_recording_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetRecording_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_recording_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.get_recording", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetRecording", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetRecording" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetRecordingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Recording", + "shortName": "get_recording" + }, + "description": "Sample for GetRecording", + "file": "meet_v2beta_generated_conference_records_service_get_recording_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetRecording_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_recording_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.get_transcript_entry", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetTranscriptEntry", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetTranscriptEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetTranscriptEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.TranscriptEntry", + "shortName": "get_transcript_entry" + }, + "description": "Sample for GetTranscriptEntry", + "file": "meet_v2beta_generated_conference_records_service_get_transcript_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetTranscriptEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_transcript_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.get_transcript_entry", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetTranscriptEntry", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetTranscriptEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetTranscriptEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.TranscriptEntry", + "shortName": "get_transcript_entry" + }, + "description": "Sample for GetTranscriptEntry", + "file": "meet_v2beta_generated_conference_records_service_get_transcript_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetTranscriptEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"meet_v2beta_generated_conference_records_service_get_transcript_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.get_transcript", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetTranscript", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetTranscript" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetTranscriptRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Transcript", + "shortName": "get_transcript" + }, + "description": "Sample for GetTranscript", + "file": "meet_v2beta_generated_conference_records_service_get_transcript_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetTranscript_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_transcript_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": 
"ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.get_transcript", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.GetTranscript", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "GetTranscript" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetTranscriptRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Transcript", + "shortName": "get_transcript" + }, + "description": "Sample for GetTranscript", + "file": "meet_v2beta_generated_conference_records_service_get_transcript_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_GetTranscript_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_get_transcript_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.list_conference_records", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListConferenceRecords", 
+ "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListConferenceRecords" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListConferenceRecordsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListConferenceRecordsAsyncPager", + "shortName": "list_conference_records" + }, + "description": "Sample for ListConferenceRecords", + "file": "meet_v2beta_generated_conference_records_service_list_conference_records_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListConferenceRecords_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_conference_records_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.list_conference_records", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListConferenceRecords", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListConferenceRecords" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListConferenceRecordsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListConferenceRecordsPager", + "shortName": "list_conference_records" + }, + "description": "Sample for ListConferenceRecords", + "file": "meet_v2beta_generated_conference_records_service_list_conference_records_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListConferenceRecords_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_conference_records_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.list_participant_sessions", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListParticipantSessions", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListParticipantSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListParticipantSessionsRequest" + }, + { + "name": "parent", 
+ "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantSessionsAsyncPager", + "shortName": "list_participant_sessions" + }, + "description": "Sample for ListParticipantSessions", + "file": "meet_v2beta_generated_conference_records_service_list_participant_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListParticipantSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_participant_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.list_participant_sessions", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListParticipantSessions", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListParticipantSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListParticipantSessionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantSessionsPager", + "shortName": "list_participant_sessions" + }, + "description": "Sample for ListParticipantSessions", + "file": "meet_v2beta_generated_conference_records_service_list_participant_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListParticipantSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_participant_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.list_participants", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListParticipants", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListParticipants" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListParticipantsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantsAsyncPager", + "shortName": "list_participants" + }, + "description": "Sample for ListParticipants", + "file": "meet_v2beta_generated_conference_records_service_list_participants_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListParticipants_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_participants_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.list_participants", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListParticipants", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListParticipants" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListParticipantsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListParticipantsPager", + "shortName": "list_participants" + }, + "description": "Sample for 
ListParticipants", + "file": "meet_v2beta_generated_conference_records_service_list_participants_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListParticipants_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_participants_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.list_recordings", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListRecordings", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListRecordings" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListRecordingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListRecordingsAsyncPager", + "shortName": "list_recordings" + }, + "description": "Sample for ListRecordings", + "file": "meet_v2beta_generated_conference_records_service_list_recordings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListRecordings_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_recordings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.list_recordings", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListRecordings", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListRecordings" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListRecordingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListRecordingsPager", + "shortName": "list_recordings" + }, + "description": "Sample for ListRecordings", + "file": "meet_v2beta_generated_conference_records_service_list_recordings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListRecordings_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, 
+ { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_recordings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.list_transcript_entries", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListTranscriptEntries", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListTranscriptEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListTranscriptEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptEntriesAsyncPager", + "shortName": "list_transcript_entries" + }, + "description": "Sample for ListTranscriptEntries", + "file": "meet_v2beta_generated_conference_records_service_list_transcript_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListTranscriptEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_transcript_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.list_transcript_entries", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListTranscriptEntries", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListTranscriptEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListTranscriptEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptEntriesPager", + "shortName": "list_transcript_entries" + }, + "description": "Sample for ListTranscriptEntries", + "file": "meet_v2beta_generated_conference_records_service_list_transcript_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListTranscriptEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 
49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_transcript_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient", + "shortName": "ConferenceRecordsServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceAsyncClient.list_transcripts", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListTranscripts", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListTranscripts" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListTranscriptsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptsAsyncPager", + "shortName": "list_transcripts" + }, + "description": "Sample for ListTranscripts", + "file": "meet_v2beta_generated_conference_records_service_list_transcripts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListTranscripts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_transcripts_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient", + "shortName": "ConferenceRecordsServiceClient" + }, + "fullName": "google.apps.meet_v2beta.ConferenceRecordsServiceClient.list_transcripts", + "method": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService.ListTranscripts", + "service": { + "fullName": "google.apps.meet.v2beta.ConferenceRecordsService", + "shortName": "ConferenceRecordsService" + }, + "shortName": "ListTranscripts" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListTranscriptsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptsPager", + "shortName": "list_transcripts" + }, + "description": "Sample for ListTranscripts", + "file": "meet_v2beta_generated_conference_records_service_list_transcripts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_ConferenceRecordsService_ListTranscripts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_conference_records_service_list_transcripts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": 
"google.apps.meet_v2beta.SpacesServiceAsyncClient.create_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.CreateSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "CreateSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.CreateSpaceRequest" + }, + { + "name": "space", + "type": "google.apps.meet_v2beta.types.Space" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "create_space" + }, + "description": "Sample for CreateSpace", + "file": "meet_v2beta_generated_spaces_service_create_space_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_CreateSpace_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_create_space_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.create_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.CreateSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "CreateSpace" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.apps.meet_v2beta.types.CreateSpaceRequest" + }, + { + "name": "space", + "type": "google.apps.meet_v2beta.types.Space" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "create_space" + }, + "description": "Sample for CreateSpace", + "file": "meet_v2beta_generated_spaces_service_create_space_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_CreateSpace_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_create_space_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.end_active_conference", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.EndActiveConference", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "EndActiveConference" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.EndActiveConferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"shortName": "end_active_conference" + }, + "description": "Sample for EndActiveConference", + "file": "meet_v2beta_generated_spaces_service_end_active_conference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_EndActiveConference_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_end_active_conference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.end_active_conference", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.EndActiveConference", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "EndActiveConference" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.EndActiveConferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "end_active_conference" + }, + "description": "Sample for EndActiveConference", + "file": "meet_v2beta_generated_spaces_service_end_active_conference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_EndActiveConference_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_end_active_conference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.get_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.GetSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "GetSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetSpaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "get_space" + }, + "description": "Sample for GetSpace", + "file": "meet_v2beta_generated_spaces_service_get_space_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_GetSpace_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_get_space_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.get_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.GetSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "GetSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetSpaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "get_space" + }, + "description": "Sample for GetSpace", + "file": "meet_v2beta_generated_spaces_service_get_space_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_GetSpace_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_get_space_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.update_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.UpdateSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + 
"shortName": "SpacesService" + }, + "shortName": "UpdateSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.UpdateSpaceRequest" + }, + { + "name": "space", + "type": "google.apps.meet_v2beta.types.Space" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "update_space" + }, + "description": "Sample for UpdateSpace", + "file": "meet_v2beta_generated_spaces_service_update_space_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_UpdateSpace_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_update_space_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.update_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.UpdateSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "UpdateSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.UpdateSpaceRequest" + }, + { + "name": "space", + "type": "google.apps.meet_v2beta.types.Space" + }, + { + "name": 
"update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "update_space" + }, + "description": "Sample for UpdateSpace", + "file": "meet_v2beta_generated_spaces_service_update_space_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_UpdateSpace_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_update_space_sync.py" + } + ] +} diff --git a/packages/google-apps-meet/scripts/decrypt-secrets.sh b/packages/google-apps-meet/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-apps-meet/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py b/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py new file mode 100644 index 000000000000..147ce8a1274f --- /dev/null +++ b/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class meetCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_space': ('space', ), + 'end_active_conference': ('name', ), + 'get_conference_record': ('name', ), + 'get_participant': ('name', ), + 'get_participant_session': ('name', ), + 'get_recording': ('name', ), + 'get_space': ('name', ), + 'get_transcript': ('name', ), + 'get_transcript_entry': ('name', ), + 'list_conference_records': ('page_size', 'page_token', 'filter', ), + 'list_participants': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_participant_sessions': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_recordings': ('parent', 'page_size', 'page_token', ), + 'list_transcript_entries': ('parent', 'page_size', 'page_token', ), + 'list_transcripts': ('parent', 'page_size', 'page_token', ), + 'update_space': ('space', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=meetCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the meet client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-apps-meet/setup.py b/packages/google-apps-meet/setup.py new file mode 100644 index 000000000000..73445f028822 --- /dev/null +++ b/packages/google-apps-meet/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-apps-meet" + + +description = "Google Apps Meet API client library" + +version = None + +with open(os.path.join(package_root, "google/apps/meet/gapic_version.py")) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + 
author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-apps-meet/testing/.gitignore b/packages/google-apps-meet/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-apps-meet/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-apps-meet/testing/constraints-3.10.txt b/packages/google-apps-meet/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-apps-meet/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-apps-meet/testing/constraints-3.11.txt b/packages/google-apps-meet/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-apps-meet/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-apps-meet/testing/constraints-3.12.txt b/packages/google-apps-meet/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-apps-meet/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-apps-meet/testing/constraints-3.7.txt b/packages/google-apps-meet/testing/constraints-3.7.txt new file mode 100644 index 000000000000..185f7d366c2f --- /dev/null +++ b/packages/google-apps-meet/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-apps-meet/testing/constraints-3.8.txt b/packages/google-apps-meet/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-apps-meet/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-apps-meet/testing/constraints-3.9.txt b/packages/google-apps-meet/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-apps-meet/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-apps-meet/tests/__init__.py b/packages/google-apps-meet/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-apps-meet/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-apps-meet/tests/unit/__init__.py b/packages/google-apps-meet/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-apps-meet/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-apps-meet/tests/unit/gapic/__init__.py b/packages/google-apps-meet/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-apps-meet/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/__init__.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py new file mode 100644 index 000000000000..e99c56a7b750 --- /dev/null +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py @@ -0,0 +1,9169 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import 
Session + +from google.apps.meet_v2beta.services.conference_records_service import ( + ConferenceRecordsServiceAsyncClient, + ConferenceRecordsServiceClient, + pagers, + transports, +) +from google.apps.meet_v2beta.types import resource, service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConferenceRecordsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ConferenceRecordsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ConferenceRecordsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ConferenceRecordsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConferenceRecordsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConferenceRecordsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConferenceRecordsServiceClient, "grpc"), + (ConferenceRecordsServiceAsyncClient, "grpc_asyncio"), + (ConferenceRecordsServiceClient, "rest"), + ], +) +def test_conference_records_service_client_from_service_account_info( + client_class, transport_name +): + creds = 
ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "meet.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConferenceRecordsServiceGrpcTransport, "grpc"), + (transports.ConferenceRecordsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ConferenceRecordsServiceRestTransport, "rest"), + ], +) +def test_conference_records_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConferenceRecordsServiceClient, "grpc"), + (ConferenceRecordsServiceAsyncClient, "grpc_asyncio"), + (ConferenceRecordsServiceClient, "rest"), + ], +) +def test_conference_records_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + 
client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "meet.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com" + ) + + +def test_conference_records_service_client_get_transport_class(): + transport = ConferenceRecordsServiceClient.get_transport_class() + available_transports = [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceRestTransport, + ] + assert transport in available_transports + + transport = ConferenceRecordsServiceClient.get_transport_class("grpc") + assert transport == transports.ConferenceRecordsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + "grpc", + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + ConferenceRecordsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConferenceRecordsServiceClient), +) +@mock.patch.object( + ConferenceRecordsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConferenceRecordsServiceAsyncClient), +) +def test_conference_records_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object( + ConferenceRecordsServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + ConferenceRecordsServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + 
patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + "grpc", + "true", + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + "grpc", + "false", + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceRestTransport, + "rest", + "true", + ), + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ConferenceRecordsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConferenceRecordsServiceClient), +) +@mock.patch.object( + ConferenceRecordsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConferenceRecordsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_conference_records_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [ConferenceRecordsServiceClient, ConferenceRecordsServiceAsyncClient], +) +@mock.patch.object( + ConferenceRecordsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConferenceRecordsServiceClient), +) +@mock.patch.object( + ConferenceRecordsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConferenceRecordsServiceAsyncClient), +) +def test_conference_records_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + "grpc", + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceRestTransport, + "rest", + ), + ], +) +def test_conference_records_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_conference_records_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_conference_records_service_client_client_options_from_dict(): + with mock.patch( + "google.apps.meet_v2beta.services.conference_records_service.transports.ConferenceRecordsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ConferenceRecordsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_conference_records_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "meet.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="meet.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetConferenceRecordRequest, + dict, + ], +) +def test_get_conference_record(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.ConferenceRecord( + name="name_value", + space="space_value", + ) + response = client.get_conference_record(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConferenceRecordRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.ConferenceRecord) + assert response.name == "name_value" + assert response.space == "space_value" + + +def test_get_conference_record_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + client.get_conference_record() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConferenceRecordRequest() + + +@pytest.mark.asyncio +async def test_get_conference_record_async( + transport: str = "grpc_asyncio", request_type=service.GetConferenceRecordRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.ConferenceRecord( + name="name_value", + space="space_value", + ) + ) + response = await client.get_conference_record(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetConferenceRecordRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.ConferenceRecord) + assert response.name == "name_value" + assert response.space == "space_value" + + +@pytest.mark.asyncio +async def test_get_conference_record_async_from_dict(): + await test_get_conference_record_async(request_type=dict) + + +def test_get_conference_record_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetConferenceRecordRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + call.return_value = resource.ConferenceRecord() + client.get_conference_record(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_conference_record_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetConferenceRecordRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.ConferenceRecord() + ) + await client.get_conference_record(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_conference_record_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.ConferenceRecord() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_conference_record( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_conference_record_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conference_record( + service.GetConferenceRecordRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_conference_record_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conference_record), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.ConferenceRecord() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.ConferenceRecord() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_conference_record( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_conference_record_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_conference_record( + service.GetConferenceRecordRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListConferenceRecordsRequest, + dict, + ], +) +def test_list_conference_records(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conference_records), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListConferenceRecordsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_conference_records(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListConferenceRecordsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConferenceRecordsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_conference_records_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conference_records), "__call__" + ) as call: + client.list_conference_records() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListConferenceRecordsRequest() + + +@pytest.mark.asyncio +async def test_list_conference_records_async( + transport: str = "grpc_asyncio", request_type=service.ListConferenceRecordsRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conference_records), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListConferenceRecordsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_conference_records(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListConferenceRecordsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConferenceRecordsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_conference_records_async_from_dict(): + await test_list_conference_records_async(request_type=dict) + + +def test_list_conference_records_pager(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_conference_records), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + next_page_token="abc", + ), + service.ListConferenceRecordsResponse( + conference_records=[], + next_page_token="def", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + ], + next_page_token="ghi", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_conference_records(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.ConferenceRecord) for i in results) + + +def test_list_conference_records_pages(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conference_records), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + next_page_token="abc", + ), + service.ListConferenceRecordsResponse( + conference_records=[], + next_page_token="def", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + ], + next_page_token="ghi", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + ), + RuntimeError, + ) + pages = list(client.list_conference_records(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_conference_records_async_pager(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conference_records), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + next_page_token="abc", + ), + service.ListConferenceRecordsResponse( + conference_records=[], + next_page_token="def", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + ], + next_page_token="ghi", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_conference_records( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resource.ConferenceRecord) for i in responses) + + +@pytest.mark.asyncio +async def test_list_conference_records_async_pages(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conference_records), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + next_page_token="abc", + ), + service.ListConferenceRecordsResponse( + conference_records=[], + next_page_token="def", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + ], + next_page_token="ghi", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_conference_records(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetParticipantRequest, + dict, + ], +) +def test_get_participant(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Participant( + name="name_value", + ) + response = client.get_participant(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetParticipantRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Participant) + assert response.name == "name_value" + + +def test_get_participant_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + client.get_participant() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetParticipantRequest() + + +@pytest.mark.asyncio +async def test_get_participant_async( + transport: str = "grpc_asyncio", request_type=service.GetParticipantRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Participant( + name="name_value", + ) + ) + response = await client.get_participant(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetParticipantRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resource.Participant) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_participant_async_from_dict(): + await test_get_participant_async(request_type=dict) + + +def test_get_participant_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetParticipantRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + call.return_value = resource.Participant() + client.get_participant(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_participant_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetParticipantRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Participant() + ) + await client.get_participant(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_participant_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Participant() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_participant( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_participant_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_participant( + service.GetParticipantRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_participant_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_participant), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Participant() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Participant() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_participant( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_participant_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_participant( + service.GetParticipantRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListParticipantsRequest, + dict, + ], +) +def test_list_participants(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListParticipantsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.list_participants(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListParticipantsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListParticipantsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_participants_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + client.list_participants() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListParticipantsRequest() + + +@pytest.mark.asyncio +async def test_list_participants_async( + transport: str = "grpc_asyncio", request_type=service.ListParticipantsRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListParticipantsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.list_participants(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListParticipantsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListParticipantsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_participants_async_from_dict(): + await test_list_participants_async(request_type=dict) + + +def test_list_participants_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListParticipantsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + call.return_value = service.ListParticipantsResponse() + client.list_participants(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_participants_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListParticipantsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListParticipantsResponse() + ) + await client.list_participants(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_participants_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListParticipantsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_participants( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_participants_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_participants( + service.ListParticipantsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_participants_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListParticipantsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListParticipantsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_participants( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_participants_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_participants( + service.ListParticipantsRequest(), + parent="parent_value", + ) + + +def test_list_participants_pager(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + resource.Participant(), + ], + next_page_token="abc", + ), + service.ListParticipantsResponse( + participants=[], + next_page_token="def", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + ], + next_page_token="ghi", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_participants(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Participant) for i in results) + + +def test_list_participants_pages(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + resource.Participant(), + ], + next_page_token="abc", + ), + service.ListParticipantsResponse( + participants=[], + next_page_token="def", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + ], + next_page_token="ghi", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + ], + ), + RuntimeError, + ) + pages = list(client.list_participants(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_participants_async_pager(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + resource.Participant(), + ], + next_page_token="abc", + ), + service.ListParticipantsResponse( + participants=[], + next_page_token="def", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + ], + next_page_token="ghi", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_participants( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resource.Participant) for i in responses) + + +@pytest.mark.asyncio +async def test_list_participants_async_pages(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participants), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + resource.Participant(), + ], + next_page_token="abc", + ), + service.ListParticipantsResponse( + participants=[], + next_page_token="def", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + ], + next_page_token="ghi", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_participants(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetParticipantSessionRequest, + dict, + ], +) +def test_get_participant_session(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.ParticipantSession( + name="name_value", + ) + response = client.get_participant_session(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetParticipantSessionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.ParticipantSession) + assert response.name == "name_value" + + +def test_get_participant_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + client.get_participant_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetParticipantSessionRequest() + + +@pytest.mark.asyncio +async def test_get_participant_session_async( + transport: str = "grpc_asyncio", request_type=service.GetParticipantSessionRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.ParticipantSession( + name="name_value", + ) + ) + response = await client.get_participant_session(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetParticipantSessionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.ParticipantSession) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_participant_session_async_from_dict(): + await test_get_participant_session_async(request_type=dict) + + +def test_get_participant_session_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetParticipantSessionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + call.return_value = resource.ParticipantSession() + client.get_participant_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_participant_session_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetParticipantSessionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.ParticipantSession() + ) + await client.get_participant_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_participant_session_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.ParticipantSession() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_participant_session( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_participant_session_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_participant_session( + service.GetParticipantSessionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_participant_session_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_participant_session), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.ParticipantSession() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.ParticipantSession() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_participant_session( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_participant_session_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_participant_session( + service.GetParticipantSessionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListParticipantSessionsRequest, + dict, + ], +) +def test_list_participant_sessions(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListParticipantSessionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_participant_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListParticipantSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListParticipantSessionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_participant_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + client.list_participant_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListParticipantSessionsRequest() + + +@pytest.mark.asyncio +async def test_list_participant_sessions_async( + transport: str = "grpc_asyncio", request_type=service.ListParticipantSessionsRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListParticipantSessionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_participant_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListParticipantSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListParticipantSessionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_participant_sessions_async_from_dict(): + await test_list_participant_sessions_async(request_type=dict) + + +def test_list_participant_sessions_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ListParticipantSessionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + call.return_value = service.ListParticipantSessionsResponse() + client.list_participant_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_participant_sessions_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListParticipantSessionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListParticipantSessionsResponse() + ) + await client.list_participant_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_participant_sessions_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListParticipantSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_participant_sessions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_participant_sessions_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_participant_sessions( + service.ListParticipantSessionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_participant_sessions_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListParticipantSessionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListParticipantSessionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_participant_sessions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_participant_sessions_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_participant_sessions( + service.ListParticipantSessionsRequest(), + parent="parent_value", + ) + + +def test_list_participant_sessions_pager(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + next_page_token="abc", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[], + next_page_token="def", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + ], + next_page_token="ghi", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_participant_sessions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.ParticipantSession) for i in results) + + +def test_list_participant_sessions_pages(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + next_page_token="abc", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[], + next_page_token="def", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + ], + next_page_token="ghi", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + ), + RuntimeError, + ) + pages = list(client.list_participant_sessions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_participant_sessions_async_pager(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + next_page_token="abc", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[], + next_page_token="def", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + ], + next_page_token="ghi", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_participant_sessions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resource.ParticipantSession) for i in responses) + + +@pytest.mark.asyncio +async def test_list_participant_sessions_async_pages(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_participant_sessions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + next_page_token="abc", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[], + next_page_token="def", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + ], + next_page_token="ghi", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_participant_sessions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetRecordingRequest, + dict, + ], +) +def test_get_recording(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Recording( + name="name_value", + state=resource.Recording.State.STARTED, + ) + response = client.get_recording(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetRecordingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Recording) + assert response.name == "name_value" + assert response.state == resource.Recording.State.STARTED + + +def test_get_recording_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + client.get_recording() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetRecordingRequest() + + +@pytest.mark.asyncio +async def test_get_recording_async( + transport: str = "grpc_asyncio", request_type=service.GetRecordingRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Recording( + name="name_value", + state=resource.Recording.State.STARTED, + ) + ) + response = await client.get_recording(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetRecordingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Recording) + assert response.name == "name_value" + assert response.state == resource.Recording.State.STARTED + + +@pytest.mark.asyncio +async def test_get_recording_async_from_dict(): + await test_get_recording_async(request_type=dict) + + +def test_get_recording_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetRecordingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + call.return_value = resource.Recording() + client.get_recording(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_recording_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetRecordingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Recording()) + await client.get_recording(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_recording_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Recording() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_recording( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_recording_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_recording( + service.GetRecordingRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_recording_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_recording), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Recording() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Recording()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_recording( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_recording_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_recording( + service.GetRecordingRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListRecordingsRequest, + dict, + ], +) +def test_list_recordings(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListRecordingsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_recordings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListRecordingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRecordingsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_recordings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + client.list_recordings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListRecordingsRequest() + + +@pytest.mark.asyncio +async def test_list_recordings_async( + transport: str = "grpc_asyncio", request_type=service.ListRecordingsRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListRecordingsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_recordings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListRecordingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRecordingsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_recordings_async_from_dict(): + await test_list_recordings_async(request_type=dict) + + +def test_list_recordings_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListRecordingsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + call.return_value = service.ListRecordingsResponse() + client.list_recordings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_recordings_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListRecordingsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListRecordingsResponse() + ) + await client.list_recordings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_recordings_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListRecordingsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_recordings( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_recordings_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_recordings( + service.ListRecordingsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_recordings_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListRecordingsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListRecordingsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_recordings( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_recordings_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_recordings( + service.ListRecordingsRequest(), + parent="parent_value", + ) + + +def test_list_recordings_pager(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + resource.Recording(), + ], + next_page_token="abc", + ), + service.ListRecordingsResponse( + recordings=[], + next_page_token="def", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + ], + next_page_token="ghi", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_recordings(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Recording) for i in results) + + +def test_list_recordings_pages(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_recordings), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + resource.Recording(), + ], + next_page_token="abc", + ), + service.ListRecordingsResponse( + recordings=[], + next_page_token="def", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + ], + next_page_token="ghi", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + ], + ), + RuntimeError, + ) + pages = list(client.list_recordings(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_recordings_async_pager(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recordings), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + resource.Recording(), + ], + next_page_token="abc", + ), + service.ListRecordingsResponse( + recordings=[], + next_page_token="def", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + ], + next_page_token="ghi", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_recordings( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resource.Recording) for i in responses) + + +@pytest.mark.asyncio +async def test_list_recordings_async_pages(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recordings), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + resource.Recording(), + ], + next_page_token="abc", + ), + service.ListRecordingsResponse( + recordings=[], + next_page_token="def", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + ], + next_page_token="ghi", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_recordings(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetTranscriptRequest, + dict, + ], +) +def test_get_transcript(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Transcript( + name="name_value", + state=resource.Transcript.State.STARTED, + ) + response = client.get_transcript(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTranscriptRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Transcript) + assert response.name == "name_value" + assert response.state == resource.Transcript.State.STARTED + + +def test_get_transcript_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + client.get_transcript() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTranscriptRequest() + + +@pytest.mark.asyncio +async def test_get_transcript_async( + transport: str = "grpc_asyncio", request_type=service.GetTranscriptRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Transcript( + name="name_value", + state=resource.Transcript.State.STARTED, + ) + ) + response = await client.get_transcript(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTranscriptRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Transcript) + assert response.name == "name_value" + assert response.state == resource.Transcript.State.STARTED + + +@pytest.mark.asyncio +async def test_get_transcript_async_from_dict(): + await test_get_transcript_async(request_type=dict) + + +def test_get_transcript_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetTranscriptRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + call.return_value = resource.Transcript() + client.get_transcript(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_transcript_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetTranscriptRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Transcript()) + await client.get_transcript(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_transcript_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Transcript() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_transcript( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_transcript_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transcript( + service.GetTranscriptRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_transcript_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_transcript), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Transcript() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Transcript()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_transcript( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_transcript_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_transcript( + service.GetTranscriptRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListTranscriptsRequest, + dict, + ], +) +def test_list_transcripts(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTranscriptsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_transcripts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTranscriptsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTranscriptsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transcripts_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + client.list_transcripts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTranscriptsRequest() + + +@pytest.mark.asyncio +async def test_list_transcripts_async( + transport: str = "grpc_asyncio", request_type=service.ListTranscriptsRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTranscriptsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_transcripts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTranscriptsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTranscriptsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_transcripts_async_from_dict(): + await test_list_transcripts_async(request_type=dict) + + +def test_list_transcripts_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTranscriptsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + call.return_value = service.ListTranscriptsResponse() + client.list_transcripts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_transcripts_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTranscriptsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTranscriptsResponse() + ) + await client.list_transcripts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_transcripts_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTranscriptsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_transcripts( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_transcripts_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_transcripts( + service.ListTranscriptsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_transcripts_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTranscriptsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTranscriptsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_transcripts( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_transcripts_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_transcripts( + service.ListTranscriptsRequest(), + parent="parent_value", + ) + + +def test_list_transcripts_pager(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + resource.Transcript(), + ], + next_page_token="abc", + ), + service.ListTranscriptsResponse( + transcripts=[], + next_page_token="def", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + ], + next_page_token="ghi", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_transcripts(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Transcript) for i in results) + + +def test_list_transcripts_pages(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_transcripts), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + resource.Transcript(), + ], + next_page_token="abc", + ), + service.ListTranscriptsResponse( + transcripts=[], + next_page_token="def", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + ], + next_page_token="ghi", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + ], + ), + RuntimeError, + ) + pages = list(client.list_transcripts(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_transcripts_async_pager(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcripts), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + resource.Transcript(), + ], + next_page_token="abc", + ), + service.ListTranscriptsResponse( + transcripts=[], + next_page_token="def", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + ], + next_page_token="ghi", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_transcripts( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resource.Transcript) for i in responses) + + +@pytest.mark.asyncio +async def test_list_transcripts_async_pages(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcripts), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + resource.Transcript(), + ], + next_page_token="abc", + ), + service.ListTranscriptsResponse( + transcripts=[], + next_page_token="def", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + ], + next_page_token="ghi", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_transcripts(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetTranscriptEntryRequest, + dict, + ], +) +def test_get_transcript_entry(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.TranscriptEntry( + name="name_value", + participant="participant_value", + text="text_value", + language_code="language_code_value", + ) + response = client.get_transcript_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTranscriptEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.TranscriptEntry) + assert response.name == "name_value" + assert response.participant == "participant_value" + assert response.text == "text_value" + assert response.language_code == "language_code_value" + + +def test_get_transcript_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + client.get_transcript_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTranscriptEntryRequest() + + +@pytest.mark.asyncio +async def test_get_transcript_entry_async( + transport: str = "grpc_asyncio", request_type=service.GetTranscriptEntryRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.TranscriptEntry( + name="name_value", + participant="participant_value", + text="text_value", + language_code="language_code_value", + ) + ) + response = await client.get_transcript_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetTranscriptEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.TranscriptEntry) + assert response.name == "name_value" + assert response.participant == "participant_value" + assert response.text == "text_value" + assert response.language_code == "language_code_value" + + +@pytest.mark.asyncio +async def test_get_transcript_entry_async_from_dict(): + await test_get_transcript_entry_async(request_type=dict) + + +def test_get_transcript_entry_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetTranscriptEntryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + call.return_value = resource.TranscriptEntry() + client.get_transcript_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_transcript_entry_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetTranscriptEntryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.TranscriptEntry() + ) + await client.get_transcript_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_transcript_entry_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.TranscriptEntry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_transcript_entry( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_transcript_entry_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transcript_entry( + service.GetTranscriptEntryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_transcript_entry_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transcript_entry), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resource.TranscriptEntry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.TranscriptEntry() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_transcript_entry( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_transcript_entry_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_transcript_entry( + service.GetTranscriptEntryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListTranscriptEntriesRequest, + dict, + ], +) +def test_list_transcript_entries(request_type, transport: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTranscriptEntriesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_transcript_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTranscriptEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTranscriptEntriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transcript_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + client.list_transcript_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTranscriptEntriesRequest() + + +@pytest.mark.asyncio +async def test_list_transcript_entries_async( + transport: str = "grpc_asyncio", request_type=service.ListTranscriptEntriesRequest +): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTranscriptEntriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_transcript_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListTranscriptEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTranscriptEntriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_transcript_entries_async_from_dict(): + await test_list_transcript_entries_async(request_type=dict) + + +def test_list_transcript_entries_field_headers(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ListTranscriptEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + call.return_value = service.ListTranscriptEntriesResponse() + client.list_transcript_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_transcript_entries_field_headers_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListTranscriptEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTranscriptEntriesResponse() + ) + await client.list_transcript_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_transcript_entries_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTranscriptEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_transcript_entries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_transcript_entries_flattened_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transcript_entries( + service.ListTranscriptEntriesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_transcript_entries_flattened_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListTranscriptEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListTranscriptEntriesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_transcript_entries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_transcript_entries_flattened_error_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_transcript_entries( + service.ListTranscriptEntriesRequest(), + parent="parent_value", + ) + + +def test_list_transcript_entries_pager(transport_name: str = "grpc"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + next_page_token="abc", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[], + next_page_token="def", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + ], + next_page_token="ghi", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_transcript_entries(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.TranscriptEntry) for i in results) + + +def test_list_transcript_entries_pages(transport_name: str = "grpc"): + 
client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + next_page_token="abc", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[], + next_page_token="def", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + ], + next_page_token="ghi", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_transcript_entries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_transcript_entries_async_pager(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + next_page_token="abc", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[], + next_page_token="def", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + ], + next_page_token="ghi", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_transcript_entries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resource.TranscriptEntry) for i in responses) + + +@pytest.mark.asyncio +async def test_list_transcript_entries_async_pages(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transcript_entries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + next_page_token="abc", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[], + next_page_token="def", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + ], + next_page_token="ghi", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_transcript_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetConferenceRecordRequest, + dict, + ], +) +def test_get_conference_record_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resource.ConferenceRecord( + name="name_value", + space="space_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.ConferenceRecord.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_conference_record(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.ConferenceRecord) + assert response.name == "name_value" + assert response.space == "space_value" + + +def test_get_conference_record_rest_required_fields( + request_type=service.GetConferenceRecordRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conference_record._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_conference_record._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConferenceRecordsServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.ConferenceRecord() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.ConferenceRecord.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_conference_record(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_conference_record_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_conference_record._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_conference_record_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_get_conference_record" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_get_conference_record" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetConferenceRecordRequest.pb( + service.GetConferenceRecordRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.ConferenceRecord.to_json( + resource.ConferenceRecord() + ) + + request = service.GetConferenceRecordRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.ConferenceRecord() + + client.get_conference_record( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_conference_record_rest_bad_request( + transport: str = "rest", request_type=service.GetConferenceRecordRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_conference_record(request) + + +def test_get_conference_record_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.ConferenceRecord() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "conferenceRecords/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.ConferenceRecord.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_conference_record(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=conferenceRecords/*}" % client.transport._host, args[1] + ) + + +def test_get_conference_record_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conference_record( + service.GetConferenceRecordRequest(), + name="name_value", + ) + + +def test_get_conference_record_rest_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListConferenceRecordsRequest, + dict, + ], +) +def test_list_conference_records_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListConferenceRecordsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListConferenceRecordsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_conference_records(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListConferenceRecordsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_conference_records_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_conference_records", + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "pre_list_conference_records", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListConferenceRecordsRequest.pb( + service.ListConferenceRecordsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListConferenceRecordsResponse.to_json( + service.ListConferenceRecordsResponse() + ) + + request = service.ListConferenceRecordsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListConferenceRecordsResponse() + + client.list_conference_records( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_conference_records_rest_bad_request( + transport: str = "rest", request_type=service.ListConferenceRecordsRequest +): + client = 
ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_conference_records(request) + + +def test_list_conference_records_rest_pager(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + next_page_token="abc", + ), + service.ListConferenceRecordsResponse( + conference_records=[], + next_page_token="def", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + ], + next_page_token="ghi", + ), + service.ListConferenceRecordsResponse( + conference_records=[ + resource.ConferenceRecord(), + resource.ConferenceRecord(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListConferenceRecordsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_conference_records(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.ConferenceRecord) for i in results) + + pages = list(client.list_conference_records(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetParticipantRequest, + dict, + ], +) +def test_get_participant_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1/participants/sample2"} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Participant( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Participant.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_participant(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Participant) + assert response.name == "name_value" + + +def test_get_participant_rest_required_fields( + request_type=service.GetParticipantRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_participant._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_participant._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = 
ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Participant() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Participant.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_participant(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_participant_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_participant._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_participant_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_get_participant" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_get_participant" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetParticipantRequest.pb(service.GetParticipantRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.Participant.to_json(resource.Participant()) + + request = service.GetParticipantRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Participant() + + client.get_participant( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_participant_rest_bad_request( + transport: str = "rest", request_type=service.GetParticipantRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1/participants/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_participant(request) + + +def test_get_participant_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Participant() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "conferenceRecords/sample1/participants/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Participant.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_participant(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=conferenceRecords/*/participants/*}" + % client.transport._host, + args[1], + ) + + +def test_get_participant_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_participant( + service.GetParticipantRequest(), + name="name_value", + ) + + +def test_get_participant_rest_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListParticipantsRequest, + dict, + ], +) +def test_list_participants_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListParticipantsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListParticipantsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_participants(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListParticipantsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_participants_rest_required_fields( + request_type=service.ListParticipantsRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_participants._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_participants._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListParticipantsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListParticipantsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_participants(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_participants_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_participants._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_participants_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_list_participants" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_list_participants" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListParticipantsRequest.pb( + 
service.ListParticipantsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListParticipantsResponse.to_json( + service.ListParticipantsResponse() + ) + + request = service.ListParticipantsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListParticipantsResponse() + + client.list_participants( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_participants_rest_bad_request( + transport: str = "rest", request_type=service.ListParticipantsRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_participants(request) + + +def test_list_participants_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListParticipantsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "conferenceRecords/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListParticipantsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_participants(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=conferenceRecords/*}/participants" + % client.transport._host, + args[1], + ) + + +def test_list_participants_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_participants( + service.ListParticipantsRequest(), + parent="parent_value", + ) + + +def test_list_participants_rest_pager(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + resource.Participant(), + ], + next_page_token="abc", + ), + service.ListParticipantsResponse( + participants=[], + next_page_token="def", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + ], + next_page_token="ghi", + ), + service.ListParticipantsResponse( + participants=[ + resource.Participant(), + resource.Participant(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListParticipantsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "conferenceRecords/sample1"} + + pager = client.list_participants(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Participant) for i in results) + + pages = list(client.list_participants(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetParticipantSessionRequest, + dict, + ], +) +def test_get_participant_session_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "conferenceRecords/sample1/participants/sample2/participantSessions/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.ParticipantSession( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.ParticipantSession.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_participant_session(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.ParticipantSession) + assert response.name == "name_value" + + +def test_get_participant_session_rest_required_fields( + request_type=service.GetParticipantSessionRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_participant_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_participant_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert 
jsonified_request["name"] == "name_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.ParticipantSession() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.ParticipantSession.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_participant_session(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_participant_session_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_participant_session._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_participant_session_rest_interceptors(null_interceptor): + 
transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_participant_session", + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "pre_get_participant_session", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetParticipantSessionRequest.pb( + service.GetParticipantSessionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.ParticipantSession.to_json( + resource.ParticipantSession() + ) + + request = service.GetParticipantSessionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.ParticipantSession() + + client.get_participant_session( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_participant_session_rest_bad_request( + transport: str = "rest", request_type=service.GetParticipantSessionRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "conferenceRecords/sample1/participants/sample2/participantSessions/sample3" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_participant_session(request) + + +def test_get_participant_session_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.ParticipantSession() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "conferenceRecords/sample1/participants/sample2/participantSessions/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.ParticipantSession.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_participant_session(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=conferenceRecords/*/participants/*/participantSessions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_participant_session_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_participant_session( + service.GetParticipantSessionRequest(), + name="name_value", + ) + + +def test_get_participant_session_rest_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListParticipantSessionsRequest, + dict, + ], +) +def test_list_participant_sessions_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1/participants/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListParticipantSessionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListParticipantSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_participant_sessions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListParticipantSessionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_participant_sessions_rest_required_fields( + request_type=service.ListParticipantSessionsRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_participant_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_participant_sessions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListParticipantSessionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListParticipantSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_participant_sessions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_participant_sessions_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_participant_sessions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_participant_sessions_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_participant_sessions", + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "pre_list_participant_sessions", + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = service.ListParticipantSessionsRequest.pb( + service.ListParticipantSessionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListParticipantSessionsResponse.to_json( + service.ListParticipantSessionsResponse() + ) + + request = service.ListParticipantSessionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListParticipantSessionsResponse() + + client.list_participant_sessions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_participant_sessions_rest_bad_request( + transport: str = "rest", request_type=service.ListParticipantSessionsRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1/participants/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_participant_sessions(request) + + +def test_list_participant_sessions_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListParticipantSessionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "conferenceRecords/sample1/participants/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListParticipantSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_participant_sessions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=conferenceRecords/*/participants/*}/participantSessions" + % client.transport._host, + args[1], + ) + + +def test_list_participant_sessions_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_participant_sessions( + service.ListParticipantSessionsRequest(), + parent="parent_value", + ) + + +def test_list_participant_sessions_rest_pager(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + next_page_token="abc", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[], + next_page_token="def", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + ], + next_page_token="ghi", + ), + service.ListParticipantSessionsResponse( + participant_sessions=[ + resource.ParticipantSession(), + resource.ParticipantSession(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListParticipantSessionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "conferenceRecords/sample1/participants/sample2"} + + pager = client.list_participant_sessions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.ParticipantSession) for i in results) + + pages = list(client.list_participant_sessions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetRecordingRequest, + dict, + ], +) +def test_get_recording_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1/recordings/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Recording( + name="name_value", + state=resource.Recording.State.STARTED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Recording.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_recording(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Recording) + assert response.name == "name_value" + assert response.state == resource.Recording.State.STARTED + + +def test_get_recording_rest_required_fields(request_type=service.GetRecordingRequest): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_recording._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).get_recording._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Recording() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Recording.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_recording(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_recording_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_recording._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_recording_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_get_recording" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_get_recording" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetRecordingRequest.pb(service.GetRecordingRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.Recording.to_json(resource.Recording()) + + request = service.GetRecordingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Recording() + + client.get_recording( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_recording_rest_bad_request( + transport: str = "rest", request_type=service.GetRecordingRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1/recordings/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_recording(request) + + +def test_get_recording_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resource.Recording() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "conferenceRecords/sample1/recordings/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Recording.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_recording(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=conferenceRecords/*/recordings/*}" + % client.transport._host, + args[1], + ) + + +def test_get_recording_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_recording( + service.GetRecordingRequest(), + name="name_value", + ) + + +def test_get_recording_rest_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListRecordingsRequest, + dict, + ], +) +def test_list_recordings_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListRecordingsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListRecordingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_recordings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRecordingsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_recordings_rest_required_fields( + request_type=service.ListRecordingsRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_recordings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_recordings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListRecordingsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListRecordingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_recordings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_recordings_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_recordings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_recordings_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_list_recordings" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_list_recordings" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListRecordingsRequest.pb(service.ListRecordingsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListRecordingsResponse.to_json( + service.ListRecordingsResponse() + ) + + request = service.ListRecordingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListRecordingsResponse() + + client.list_recordings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_recordings_rest_bad_request( + transport: str = "rest", request_type=service.ListRecordingsRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_recordings(request) + + +def test_list_recordings_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListRecordingsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "conferenceRecords/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListRecordingsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_recordings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=conferenceRecords/*}/recordings" + % client.transport._host, + args[1], + ) + + +def test_list_recordings_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_recordings( + service.ListRecordingsRequest(), + parent="parent_value", + ) + + +def test_list_recordings_rest_pager(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + resource.Recording(), + ], + next_page_token="abc", + ), + service.ListRecordingsResponse( + recordings=[], + next_page_token="def", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + ], + next_page_token="ghi", + ), + service.ListRecordingsResponse( + recordings=[ + resource.Recording(), + resource.Recording(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListRecordingsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "conferenceRecords/sample1"} + + pager = client.list_recordings(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Recording) for i in results) + + pages = list(client.list_recordings(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetTranscriptRequest, + dict, + ], +) +def test_get_transcript_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1/transcripts/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Transcript( + name="name_value", + state=resource.Transcript.State.STARTED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Transcript.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_transcript(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Transcript) + assert response.name == "name_value" + assert response.state == resource.Transcript.State.STARTED + + +def test_get_transcript_rest_required_fields(request_type=service.GetTranscriptRequest): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transcript._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transcript._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert 
jsonified_request["name"] == "name_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Transcript() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Transcript.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_transcript(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_transcript_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_transcript._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_transcript_rest_interceptors(null_interceptor): + transport = 
transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_get_transcript" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_get_transcript" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetTranscriptRequest.pb(service.GetTranscriptRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.Transcript.to_json(resource.Transcript()) + + request = service.GetTranscriptRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Transcript() + + client.get_transcript( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_transcript_rest_bad_request( + transport: str = "rest", request_type=service.GetTranscriptRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "conferenceRecords/sample1/transcripts/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_transcript(request) + + +def test_get_transcript_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Transcript() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "conferenceRecords/sample1/transcripts/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Transcript.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_transcript(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=conferenceRecords/*/transcripts/*}" + % client.transport._host, + args[1], + ) + + +def test_get_transcript_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transcript( + service.GetTranscriptRequest(), + name="name_value", + ) + + +def test_get_transcript_rest_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListTranscriptsRequest, + dict, + ], +) +def test_list_transcripts_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListTranscriptsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListTranscriptsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transcripts(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTranscriptsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transcripts_rest_required_fields( + request_type=service.ListTranscriptsRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transcripts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transcripts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListTranscriptsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListTranscriptsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_transcripts(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_transcripts_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_transcripts._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_transcripts_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_list_transcripts" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_list_transcripts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListTranscriptsRequest.pb(service.ListTranscriptsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListTranscriptsResponse.to_json( + service.ListTranscriptsResponse() + ) + + request = service.ListTranscriptsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListTranscriptsResponse() + + client.list_transcripts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_transcripts_rest_bad_request( + transport: str = "rest", request_type=service.ListTranscriptsRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transcripts(request) + + +def test_list_transcripts_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListTranscriptsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "conferenceRecords/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListTranscriptsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transcripts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=conferenceRecords/*}/transcripts" + % client.transport._host, + args[1], + ) + + +def test_list_transcripts_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transcripts( + service.ListTranscriptsRequest(), + parent="parent_value", + ) + + +def test_list_transcripts_rest_pager(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + resource.Transcript(), + ], + next_page_token="abc", + ), + service.ListTranscriptsResponse( + transcripts=[], + next_page_token="def", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + ], + next_page_token="ghi", + ), + service.ListTranscriptsResponse( + transcripts=[ + resource.Transcript(), + resource.Transcript(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListTranscriptsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "conferenceRecords/sample1"} + + pager = client.list_transcripts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Transcript) for i in results) + + pages = list(client.list_transcripts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetTranscriptEntryRequest, + dict, + ], +) +def test_get_transcript_entry_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "conferenceRecords/sample1/transcripts/sample2/entries/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.TranscriptEntry( + name="name_value", + participant="participant_value", + text="text_value", + language_code="language_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.TranscriptEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_transcript_entry(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.TranscriptEntry) + assert response.name == "name_value" + assert response.participant == "participant_value" + assert response.text == "text_value" + assert response.language_code == "language_code_value" + + +def test_get_transcript_entry_rest_required_fields( + request_type=service.GetTranscriptEntryRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transcript_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_transcript_entry._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.TranscriptEntry() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.TranscriptEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_transcript_entry(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_transcript_entry_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_transcript_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_transcript_entry_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "post_get_transcript_entry" + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, "pre_get_transcript_entry" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetTranscriptEntryRequest.pb( + service.GetTranscriptEntryRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.TranscriptEntry.to_json( + resource.TranscriptEntry() + ) + + request = service.GetTranscriptEntryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.TranscriptEntry() + + client.get_transcript_entry( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_transcript_entry_rest_bad_request( + transport: str = "rest", request_type=service.GetTranscriptEntryRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "conferenceRecords/sample1/transcripts/sample2/entries/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_transcript_entry(request) + + +def test_get_transcript_entry_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resource.TranscriptEntry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "conferenceRecords/sample1/transcripts/sample2/entries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.TranscriptEntry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_transcript_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=conferenceRecords/*/transcripts/*/entries/*}" + % client.transport._host, + args[1], + ) + + +def test_get_transcript_entry_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_transcript_entry( + service.GetTranscriptEntryRequest(), + name="name_value", + ) + + +def test_get_transcript_entry_rest_error(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListTranscriptEntriesRequest, + dict, + ], +) +def test_list_transcript_entries_rest(request_type): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1/transcripts/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListTranscriptEntriesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListTranscriptEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transcript_entries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTranscriptEntriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transcript_entries_rest_required_fields( + request_type=service.ListTranscriptEntriesRequest, +): + transport_class = transports.ConferenceRecordsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transcript_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transcript_entries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListTranscriptEntriesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListTranscriptEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_transcript_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_transcript_entries_rest_unset_required_fields(): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_transcript_entries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_transcript_entries_rest_interceptors(null_interceptor): + transport = transports.ConferenceRecordsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConferenceRecordsServiceRestInterceptor(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_transcript_entries", + ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "pre_list_transcript_entries", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListTranscriptEntriesRequest.pb( + service.ListTranscriptEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListTranscriptEntriesResponse.to_json( + service.ListTranscriptEntriesResponse() + ) + + request = service.ListTranscriptEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListTranscriptEntriesResponse() + + client.list_transcript_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_transcript_entries_rest_bad_request( + transport: str = "rest", request_type=service.ListTranscriptEntriesRequest +): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "conferenceRecords/sample1/transcripts/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transcript_entries(request) + + +def test_list_transcript_entries_rest_flattened(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListTranscriptEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "conferenceRecords/sample1/transcripts/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListTranscriptEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transcript_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=conferenceRecords/*/transcripts/*}/entries" + % client.transport._host, + args[1], + ) + + +def test_list_transcript_entries_rest_flattened_error(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transcript_entries( + service.ListTranscriptEntriesRequest(), + parent="parent_value", + ) + + +def test_list_transcript_entries_rest_pager(transport: str = "rest"): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + next_page_token="abc", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[], + next_page_token="def", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + ], + next_page_token="ghi", + ), + service.ListTranscriptEntriesResponse( + transcript_entries=[ + resource.TranscriptEntry(), + resource.TranscriptEntry(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListTranscriptEntriesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "conferenceRecords/sample1/transcripts/sample2"} + + pager = client.list_transcript_entries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.TranscriptEntry) for i in results) + + pages = list(client.list_transcript_entries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ConferenceRecordsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConferenceRecordsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConferenceRecordsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConferenceRecordsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConferenceRecordsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConferenceRecordsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConferenceRecordsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConferenceRecordsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ConferenceRecordsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConferenceRecordsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConferenceRecordsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConferenceRecordsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + transports.ConferenceRecordsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ConferenceRecordsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConferenceRecordsServiceGrpcTransport, + ) + + +def test_conference_records_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConferenceRecordsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_conference_records_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.apps.meet_v2beta.services.conference_records_service.transports.ConferenceRecordsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ConferenceRecordsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_conference_record", + "list_conference_records", + "get_participant", + "list_participants", + "get_participant_session", + "list_participant_sessions", + "get_recording", + "list_recordings", + "get_transcript", + "list_transcripts", + "get_transcript_entry", + "list_transcript_entries", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_conference_records_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.apps.meet_v2beta.services.conference_records_service.transports.ConferenceRecordsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConferenceRecordsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_conference_records_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.apps.meet_v2beta.services.conference_records_service.transports.ConferenceRecordsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConferenceRecordsServiceTransport() + adc.assert_called_once() + + +def test_conference_records_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConferenceRecordsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + ], +) +def test_conference_records_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + transports.ConferenceRecordsServiceRestTransport, + ], +) +def test_conference_records_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConferenceRecordsServiceGrpcTransport, grpc_helpers), + (transports.ConferenceRecordsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_conference_records_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "meet.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="meet.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + ], +) +def test_conference_records_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_conference_records_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ConferenceRecordsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_conference_records_service_host_no_port(transport_name): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="meet.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "meet.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_conference_records_service_host_with_port(transport_name): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="meet.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "meet.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com:8000" + ) + + 
+@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_conference_records_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ConferenceRecordsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ConferenceRecordsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_conference_record._session + session2 = client2.transport.get_conference_record._session + assert session1 != session2 + session1 = client1.transport.list_conference_records._session + session2 = client2.transport.list_conference_records._session + assert session1 != session2 + session1 = client1.transport.get_participant._session + session2 = client2.transport.get_participant._session + assert session1 != session2 + session1 = client1.transport.list_participants._session + session2 = client2.transport.list_participants._session + assert session1 != session2 + session1 = client1.transport.get_participant_session._session + session2 = client2.transport.get_participant_session._session + assert session1 != session2 + session1 = client1.transport.list_participant_sessions._session + session2 = client2.transport.list_participant_sessions._session + assert session1 != session2 + session1 = client1.transport.get_recording._session + session2 = client2.transport.get_recording._session + assert session1 != session2 + session1 = client1.transport.list_recordings._session + session2 = client2.transport.list_recordings._session + assert session1 != session2 + session1 = client1.transport.get_transcript._session + session2 = client2.transport.get_transcript._session + assert session1 != session2 + session1 = client1.transport.list_transcripts._session + session2 = client2.transport.list_transcripts._session + assert session1 != session2 + session1 = client1.transport.get_transcript_entry._session + 
session2 = client2.transport.get_transcript_entry._session + assert session1 != session2 + session1 = client1.transport.list_transcript_entries._session + session2 = client2.transport.list_transcript_entries._session + assert session1 != session2 + + +def test_conference_records_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConferenceRecordsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_conference_records_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConferenceRecordsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + ], +) +def test_conference_records_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConferenceRecordsServiceGrpcTransport, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + ], +) +def test_conference_records_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_conference_record_path(): + conference_record = "squid" + expected = "conferenceRecords/{conference_record}".format( + conference_record=conference_record, + ) + actual = ConferenceRecordsServiceClient.conference_record_path(conference_record) + assert expected == actual + + +def test_parse_conference_record_path(): + expected = { + "conference_record": "clam", + } + path = ConferenceRecordsServiceClient.conference_record_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConferenceRecordsServiceClient.parse_conference_record_path(path) + assert expected == actual + + +def test_participant_path(): + conference_record = "whelk" + participant = "octopus" + expected = ( + "conferenceRecords/{conference_record}/participants/{participant}".format( + conference_record=conference_record, + participant=participant, + ) + ) + actual = ConferenceRecordsServiceClient.participant_path( + conference_record, participant + ) + assert expected == actual + + +def test_parse_participant_path(): + expected = { + "conference_record": "oyster", + "participant": "nudibranch", + } + path = ConferenceRecordsServiceClient.participant_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_participant_path(path) + assert expected == actual + + +def test_participant_session_path(): + conference_record = "cuttlefish" + participant = "mussel" + participant_session = "winkle" + expected = "conferenceRecords/{conference_record}/participants/{participant}/participantSessions/{participant_session}".format( + conference_record=conference_record, + participant=participant, + participant_session=participant_session, + ) + actual = ConferenceRecordsServiceClient.participant_session_path( + conference_record, participant, participant_session + ) + assert expected == actual + + +def test_parse_participant_session_path(): + expected = { + "conference_record": "nautilus", + "participant": "scallop", + "participant_session": "abalone", + } + path = ConferenceRecordsServiceClient.participant_session_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConferenceRecordsServiceClient.parse_participant_session_path(path) + assert expected == actual + + +def test_recording_path(): + conference_record = "squid" + recording = "clam" + expected = "conferenceRecords/{conference_record}/recordings/{recording}".format( + conference_record=conference_record, + recording=recording, + ) + actual = ConferenceRecordsServiceClient.recording_path(conference_record, recording) + assert expected == actual + + +def test_parse_recording_path(): + expected = { + "conference_record": "whelk", + "recording": "octopus", + } + path = ConferenceRecordsServiceClient.recording_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_recording_path(path) + assert expected == actual + + +def test_space_path(): + space = "oyster" + expected = "spaces/{space}".format( + space=space, + ) + actual = ConferenceRecordsServiceClient.space_path(space) + assert expected == actual + + +def test_parse_space_path(): + expected = { + "space": "nudibranch", + } + path = ConferenceRecordsServiceClient.space_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_space_path(path) + assert expected == actual + + +def test_transcript_path(): + conference_record = "cuttlefish" + transcript = "mussel" + expected = "conferenceRecords/{conference_record}/transcripts/{transcript}".format( + conference_record=conference_record, + transcript=transcript, + ) + actual = ConferenceRecordsServiceClient.transcript_path( + conference_record, transcript + ) + assert expected == actual + + +def test_parse_transcript_path(): + expected = { + "conference_record": "winkle", + "transcript": "nautilus", + } + path = ConferenceRecordsServiceClient.transcript_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConferenceRecordsServiceClient.parse_transcript_path(path) + assert expected == actual + + +def test_transcript_entry_path(): + conference_record = "scallop" + transcript = "abalone" + entry = "squid" + expected = "conferenceRecords/{conference_record}/transcripts/{transcript}/entries/{entry}".format( + conference_record=conference_record, + transcript=transcript, + entry=entry, + ) + actual = ConferenceRecordsServiceClient.transcript_entry_path( + conference_record, transcript, entry + ) + assert expected == actual + + +def test_parse_transcript_entry_path(): + expected = { + "conference_record": "clam", + "transcript": "whelk", + "entry": "octopus", + } + path = ConferenceRecordsServiceClient.transcript_entry_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_transcript_entry_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConferenceRecordsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ConferenceRecordsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ConferenceRecordsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ConferenceRecordsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConferenceRecordsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ConferenceRecordsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ConferenceRecordsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ConferenceRecordsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ConferenceRecordsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConferenceRecordsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ConferenceRecordsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ConferenceRecordsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConferenceRecordsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConferenceRecordsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConferenceRecordsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConferenceRecordsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConferenceRecordsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ConferenceRecordsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + ConferenceRecordsServiceClient, + transports.ConferenceRecordsServiceGrpcTransport, + ), + ( + ConferenceRecordsServiceAsyncClient, + transports.ConferenceRecordsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py new file mode 100644 index 000000000000..285e5f3d0e8c --- /dev/null +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py @@ -0,0 +1,3495 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.apps.meet_v2beta.services.spaces_service import ( + SpacesServiceAsyncClient, + SpacesServiceClient, + transports, +) +from google.apps.meet_v2beta.types import resource, service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SpacesServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SpacesServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SpacesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SpacesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SpacesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SpacesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SpacesServiceClient, "grpc"), + (SpacesServiceAsyncClient, "grpc_asyncio"), + (SpacesServiceClient, "rest"), + ], +) +def test_spaces_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "meet.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SpacesServiceGrpcTransport, "grpc"), + 
(transports.SpacesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SpacesServiceRestTransport, "rest"), + ], +) +def test_spaces_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SpacesServiceClient, "grpc"), + (SpacesServiceAsyncClient, "grpc_asyncio"), + (SpacesServiceClient, "rest"), + ], +) +def test_spaces_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "meet.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com" + ) + + +def test_spaces_service_client_get_transport_class(): + transport = SpacesServiceClient.get_transport_class() + available_transports = [ + transports.SpacesServiceGrpcTransport, + 
transports.SpacesServiceRestTransport, + ] + assert transport in available_transports + + transport = SpacesServiceClient.get_transport_class("grpc") + assert transport == transports.SpacesServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SpacesServiceClient, transports.SpacesServiceGrpcTransport, "grpc"), + ( + SpacesServiceAsyncClient, + transports.SpacesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SpacesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SpacesServiceClient), +) +@mock.patch.object( + SpacesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SpacesServiceAsyncClient), +) +def test_spaces_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SpacesServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SpacesServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (SpacesServiceClient, transports.SpacesServiceGrpcTransport, "grpc", "true"), + ( + SpacesServiceAsyncClient, + transports.SpacesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (SpacesServiceClient, 
transports.SpacesServiceGrpcTransport, "grpc", "false"), + ( + SpacesServiceAsyncClient, + transports.SpacesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest", "true"), + (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + SpacesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SpacesServiceClient), +) +@mock.patch.object( + SpacesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SpacesServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_spaces_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SpacesServiceClient, SpacesServiceAsyncClient] +) +@mock.patch.object( + SpacesServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SpacesServiceClient), +) +@mock.patch.object( + SpacesServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SpacesServiceAsyncClient), +) +def test_spaces_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SpacesServiceClient, transports.SpacesServiceGrpcTransport, "grpc"), + ( + SpacesServiceAsyncClient, + transports.SpacesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest"), + ], +) +def test_spaces_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SpacesServiceClient, + transports.SpacesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SpacesServiceAsyncClient, + transports.SpacesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SpacesServiceClient, transports.SpacesServiceRestTransport, "rest", None), + ], +) +def 
test_spaces_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_spaces_service_client_client_options_from_dict(): + with mock.patch( + "google.apps.meet_v2beta.services.spaces_service.transports.SpacesServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SpacesServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SpacesServiceClient, + transports.SpacesServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SpacesServiceAsyncClient, + transports.SpacesServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_spaces_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "meet.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="meet.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSpaceRequest, + dict, + ], +) +def test_create_space(request_type, transport: str = "grpc"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + response = client.create_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSpaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +def test_create_space_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + client.create_space() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSpaceRequest() + + +@pytest.mark.asyncio +async def test_create_space_async( + transport: str = "grpc_asyncio", request_type=service.CreateSpaceRequest +): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + ) + response = await client.create_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateSpaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.asyncio +async def test_create_space_async_from_dict(): + await test_create_space_async(request_type=dict) + + +def test_create_space_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_space( + space=resource.Space(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].space + mock_val = resource.Space(name="name_value") + assert arg == mock_val + + +def test_create_space_flattened_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_space( + service.CreateSpaceRequest(), + space=resource.Space(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_space_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Space()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_space( + space=resource.Space(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].space + mock_val = resource.Space(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_space_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_space( + service.CreateSpaceRequest(), + space=resource.Space(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSpaceRequest, + dict, + ], +) +def test_get_space(request_type, transport: str = "grpc"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + response = client.get_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSpaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +def test_get_space_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + client.get_space() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSpaceRequest() + + +@pytest.mark.asyncio +async def test_get_space_async( + transport: str = "grpc_asyncio", request_type=service.GetSpaceRequest +): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + ) + response = await client.get_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSpaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.asyncio +async def test_get_space_async_from_dict(): + await test_get_space_async(request_type=dict) + + +def test_get_space_field_headers(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSpaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + call.return_value = resource.Space() + client.get_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_space_field_headers_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSpaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Space()) + await client.get_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_space_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_space( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_space_flattened_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_space( + service.GetSpaceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_space_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Space()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_space( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_space_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_space( + service.GetSpaceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateSpaceRequest, + dict, + ], +) +def test_update_space(request_type, transport: str = "grpc"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + response = client.update_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSpaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +def test_update_space_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_space), "__call__") as call: + client.update_space() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSpaceRequest() + + +@pytest.mark.asyncio +async def test_update_space_async( + transport: str = "grpc_asyncio", request_type=service.UpdateSpaceRequest +): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + ) + response = await client.update_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSpaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.asyncio +async def test_update_space_async_from_dict(): + await test_update_space_async(request_type=dict) + + +def test_update_space_field_headers(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.UpdateSpaceRequest() + + request.space.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + call.return_value = resource.Space() + client.update_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "space.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_space_field_headers_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateSpaceRequest() + + request.space.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Space()) + await client.update_space(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "space.name=name_value", + ) in kw["metadata"] + + +def test_update_space_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resource.Space() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_space( + space=resource.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].space + mock_val = resource.Space(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_space_flattened_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_space( + service.UpdateSpaceRequest(), + space=resource.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_space_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Space() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Space()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_space( + space=resource.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].space + mock_val = resource.Space(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_space_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_space( + service.UpdateSpaceRequest(), + space=resource.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.EndActiveConferenceRequest, + dict, + ], +) +def test_end_active_conference(request_type, transport: str = "grpc"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.end_active_conference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.EndActiveConferenceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_end_active_conference_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + client.end_active_conference() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.EndActiveConferenceRequest() + + +@pytest.mark.asyncio +async def test_end_active_conference_async( + transport: str = "grpc_asyncio", request_type=service.EndActiveConferenceRequest +): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.end_active_conference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.EndActiveConferenceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_end_active_conference_async_from_dict(): + await test_end_active_conference_async(request_type=dict) + + +def test_end_active_conference_field_headers(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.EndActiveConferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + call.return_value = None + client.end_active_conference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_end_active_conference_field_headers_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.EndActiveConferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.end_active_conference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_end_active_conference_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.end_active_conference( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_end_active_conference_flattened_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.end_active_conference( + service.EndActiveConferenceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_end_active_conference_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.end_active_conference( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_end_active_conference_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.end_active_conference( + service.EndActiveConferenceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSpaceRequest, + dict, + ], +) +def test_create_space_rest(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["space"] = { + "name": "name_value", + "meeting_uri": "meeting_uri_value", + "meeting_code": "meeting_code_value", + "config": {"access_type": 1, "entry_point_access": 1}, + "active_conference": {"conference_record": "conference_record_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateSpaceRequest.meta.fields["space"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["space"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["space"][field])): + del request_init["space"][field][i][subfield] + else: + del 
request_init["space"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_space(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_space_rest_interceptors(null_interceptor): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SpacesServiceRestInterceptor(), + ) + client = SpacesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_create_space" + ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_create_space" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.CreateSpaceRequest.pb(service.CreateSpaceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.Space.to_json(resource.Space()) + + request = service.CreateSpaceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Space() + + client.create_space( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_space_rest_bad_request( + transport: str = "rest", request_type=service.CreateSpaceRequest +): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_space(request) + + +def test_create_space_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resource.Space() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + space=resource.Space(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_space(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/spaces" % client.transport._host, args[1] + ) + + +def test_create_space_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_space( + service.CreateSpaceRequest(), + space=resource.Space(name="name_value"), + ) + + +def test_create_space_rest_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSpaceRequest, + dict, + ], +) +def test_get_space_rest(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_space(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +def test_get_space_rest_required_fields(request_type=service.GetSpaceRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in 
jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Space() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_space(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_space_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_space_rest_interceptors(null_interceptor): + transport = transports.SpacesServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SpacesServiceRestInterceptor(), + ) + client = SpacesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_get_space" + ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_get_space" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetSpaceRequest.pb(service.GetSpaceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resource.Space.to_json(resource.Space()) + + request = service.GetSpaceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Space() + + client.get_space( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_space_rest_bad_request( + transport: str = "rest", request_type=service.GetSpaceRequest +): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_space(request) + + +def test_get_space_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_space(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=spaces/*}" % client.transport._host, args[1] + ) + + +def test_get_space_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_space( + service.GetSpaceRequest(), + name="name_value", + ) + + +def test_get_space_rest_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateSpaceRequest, + dict, + ], +) +def test_update_space_rest(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"space": {"name": "spaces/sample1"}} + request_init["space"] = { + "name": "spaces/sample1", + "meeting_uri": "meeting_uri_value", + "meeting_code": "meeting_code_value", + "config": {"access_type": 1, "entry_point_access": 1}, + "active_conference": {"conference_record": "conference_record_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateSpaceRequest.meta.fields["space"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["space"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["space"][field])): + del request_init["space"][field][i][subfield] + else: + del 
request_init["space"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_space(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +def test_update_space_rest_required_fields(request_type=service.UpdateSpaceRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Space() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_space(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_space_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) + + 
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_space_rest_interceptors(null_interceptor):
    """pre/post update_space REST interceptors are invoked exactly once."""
    transport = transports.SpacesServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.SpacesServiceRestInterceptor(),
    )
    client = SpacesServiceClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.SpacesServiceRestInterceptor, "post_update_space"
    ) as post, mock.patch.object(
        transports.SpacesServiceRestInterceptor, "pre_update_space"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = service.UpdateSpaceRequest.pb(service.UpdateSpaceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = resource.Space.to_json(resource.Space())

        request = service.UpdateSpaceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = resource.Space()

        client.update_space(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()


def test_update_space_rest_bad_request(
    transport: str = "rest", request_type=service.UpdateSpaceRequest
):
    """HTTP 400 from the REST session surfaces as core_exceptions.BadRequest."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"space": {"name": "spaces/sample1"}}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_space(request)


def test_update_space_rest_flattened():
    """Flattened update_space hits the expected v2beta REST URI."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = resource.Space()

        # get arguments that satisfy an http rule for this method
        sample_request = {"space": {"name": "spaces/sample1"}}

        # get truthy value for each flattened field
        mock_args = dict(
            space=resource.Space(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = resource.Space.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.update_space(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v2beta/{space.name=spaces/*}" % client.transport._host, args[1]
        )


def test_update_space_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError (REST)."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_space(
            service.UpdateSpaceRequest(),
            space=resource.Space(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_update_space_rest_error():
    """Smoke test: a REST client can be constructed for update_space."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


@pytest.mark.parametrize(
    "request_type",
    [
        service.EndActiveConferenceRequest,
        dict,
    ],
)
def test_end_active_conference_rest(request_type):
    """REST end_active_conference round-trip: empty-body response maps to None."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "spaces/sample1"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = ""

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.end_active_conference(request)

    # Establish that the response is the type that we expect.
    assert response is None


def test_end_active_conference_rest_required_fields(
    request_type=service.EndActiveConferenceRequest,
):
    """Required ``name`` field is enforced and query params are populated."""
    transport_class = transports.SpacesServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(
        json_format.MessageToJson(
            pb_request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).end_active_conference._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = "name_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).end_active_conference._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == "name_value"

    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = None
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": pb_request,
            }
            transcode_result["body"] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = ""

            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.end_active_conference(request)

            expected_params = [("$alt", "json;enum-encoding=int")]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params


def test_end_active_conference_rest_unset_required_fields():
    """end_active_conference reports ``name`` as its only required field."""
    # NOTE(review): fixed missing call parentheses — the credentials *class*
    # was being passed where an instance is expected.
    transport = transports.SpacesServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.end_active_conference._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name",)))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_end_active_conference_rest_interceptors(null_interceptor):
    """pre interceptor is invoked exactly once (no post hook for a void RPC)."""
    transport = transports.SpacesServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None
        if null_interceptor
        else transports.SpacesServiceRestInterceptor(),
    )
    client = SpacesServiceClient(transport=transport)
    with mock.patch.object(
        type(client.transport._session), "request"
    ) as req, mock.patch.object(
        path_template, "transcode"
    ) as transcode, mock.patch.object(
        transports.SpacesServiceRestInterceptor, "pre_end_active_conference"
    ) as pre:
        pre.assert_not_called()
        pb_message = service.EndActiveConferenceRequest.pb(
            service.EndActiveConferenceRequest()
        )
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()

        request = service.EndActiveConferenceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata

        client.end_active_conference(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()


def test_end_active_conference_rest_bad_request(
    transport: str = "rest", request_type=service.EndActiveConferenceRequest
):
    """HTTP 400 from the REST session surfaces as core_exceptions.BadRequest."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "spaces/sample1"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.end_active_conference(request)


def test_end_active_conference_rest_flattened():
    """Flattened end_active_conference hits the expected custom-verb REST URI."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # get arguments that satisfy an http rule for this method
        sample_request = {"name": "spaces/sample1"}

        # get truthy value for each flattened field
        mock_args = dict(
            name="name_value",
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = ""
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        client.end_active_conference(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "%s/v2beta/{name=spaces/*}:endActiveConference" % client.transport._host,
            args[1],
        )


def test_end_active_conference_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError (REST)."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.end_active_conference(
            service.EndActiveConferenceRequest(),
            name="name_value",
        )


def test_end_active_conference_rest_error():
    """Smoke test: a REST client can be constructed for end_active_conference."""
    client = SpacesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )


def test_credentials_transport_error():
    """Supplying credentials together with a transport instance is rejected."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.SpacesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = SpacesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
+ transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SpacesServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SpacesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + transports.SpacesServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SpacesServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SpacesServiceGrpcTransport, + ) + + +def test_spaces_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SpacesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_spaces_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.apps.meet_v2beta.services.spaces_service.transports.SpacesServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SpacesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_space", + "get_space", + "update_space", + "end_active_conference", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_spaces_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.apps.meet_v2beta.services.spaces_service.transports.SpacesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SpacesServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_spaces_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.apps.meet_v2beta.services.spaces_service.transports.SpacesServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SpacesServiceTransport() + adc.assert_called_once() + + +def test_spaces_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SpacesServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + ], +) +def test_spaces_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + transports.SpacesServiceRestTransport, + ], +) +def test_spaces_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SpacesServiceGrpcTransport, grpc_helpers), + (transports.SpacesServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_spaces_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "meet.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="meet.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + ], +) +def test_spaces_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_spaces_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SpacesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_spaces_service_host_no_port(transport_name): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="meet.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "meet.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_spaces_service_host_with_port(transport_name): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="meet.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "meet.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://meet.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_spaces_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SpacesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SpacesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_space._session + session2 = client2.transport.create_space._session + assert session1 != session2 + session1 = client1.transport.get_space._session + session2 = client2.transport.get_space._session + assert session1 != session2 + session1 = client1.transport.update_space._session + session2 = client2.transport.update_space._session + assert session1 != session2 + session1 = client1.transport.end_active_conference._session + session2 = client2.transport.end_active_conference._session + assert session1 != session2 + + +def test_spaces_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SpacesServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_spaces_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SpacesServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + ], +) +def test_spaces_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + ], +) +def test_spaces_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_conference_record_path(): + conference_record = "squid" + expected = "conferenceRecords/{conference_record}".format( + conference_record=conference_record, + ) + actual = SpacesServiceClient.conference_record_path(conference_record) + assert expected == actual + + +def test_parse_conference_record_path(): + expected = { + "conference_record": "clam", + } + path = SpacesServiceClient.conference_record_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SpacesServiceClient.parse_conference_record_path(path) + assert expected == actual + + +def test_space_path(): + space = "whelk" + expected = "spaces/{space}".format( + space=space, + ) + actual = SpacesServiceClient.space_path(space) + assert expected == actual + + +def test_parse_space_path(): + expected = { + "space": "octopus", + } + path = SpacesServiceClient.space_path(**expected) + + # Check that the path construction is reversible. + actual = SpacesServiceClient.parse_space_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SpacesServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = SpacesServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SpacesServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SpacesServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = SpacesServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SpacesServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SpacesServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = SpacesServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SpacesServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = SpacesServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = SpacesServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SpacesServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SpacesServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = SpacesServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SpacesServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SpacesServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SpacesServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SpacesServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SpacesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SpacesServiceClient, transports.SpacesServiceGrpcTransport), + (SpacesServiceAsyncClient, transports.SpacesServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-advisorynotifications/CHANGELOG.md b/packages/google-cloud-advisorynotifications/CHANGELOG.md index b69eab338f39..adc22ece4b3c 100644 --- a/packages/google-cloud-advisorynotifications/CHANGELOG.md +++ b/packages/google-cloud-advisorynotifications/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.3.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.3.2...google-cloud-advisorynotifications-v0.3.3) (2023-12-07) + + +### Features + +* Adding GetNotification and ListNotifications methods for notifications parented at the project level ([d250ab3](https://github.com/googleapis/google-cloud-python/commit/d250ab3f1c9ed29a530360899445f2d8714fc157)) +* Adding project level methods to advisorynotifications.googleapis.com 
([d250ab3](https://github.com/googleapis/google-cloud-python/commit/d250ab3f1c9ed29a530360899445f2d8714fc157)) + + +### Documentation + +* Adding docs for new project level methods ([d250ab3](https://github.com/googleapis/google-cloud-python/commit/d250ab3f1c9ed29a530360899445f2d8714fc157)) + ## [0.3.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.3.1...google-cloud-advisorynotifications-v0.3.2) (2023-12-07) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py index 78e859312100..a01b131351cc 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.2" # {x-release-please-version} +__version__ = "0.3.3" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py index 78e859312100..a01b131351cc 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.2" # {x-release-please-version} +__version__ = "0.3.3" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py index 4e6dc1692cc6..47b8c7eab812 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py @@ -279,7 +279,9 @@ async def sample_list_notifications(): Required. The parent, which owns this collection of notifications. Must be of the form - "organizations/{organization}/locations/{location}". + "organizations/{organization}/locations/{location}" + or + "projects/{project}/locations/{location}" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -403,7 +405,9 @@ async def sample_get_notification(): Required. A name of the notification to retrieve. Format: - organizations/{organization}/locations/{location}/notifications/{notification}. + organizations/{organization}/locations/{location}/notifications/{notification} + or + projects/{projects}/locations/{location}/notifications/{notification}. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py index 206cc2793c33..342e341d8929 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py @@ -517,7 +517,9 @@ def sample_list_notifications(): Required. The parent, which owns this collection of notifications. Must be of the form - "organizations/{organization}/locations/{location}". + "organizations/{organization}/locations/{location}" + or + "projects/{project}/locations/{location}" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -632,7 +634,9 @@ def sample_get_notification(): Required. A name of the notification to retrieve. Format: - organizations/{organization}/locations/{location}/notifications/{notification}. + organizations/{organization}/locations/{location}/notifications/{notification} + or + projects/{projects}/locations/{location}/notifications/{notification}. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py index 5e6bca71b7e6..3fa6d6544ea8 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py @@ -330,6 +330,10 @@ def __call__( "method": "get", "uri": "/v1/{name=organizations/*/locations/*/notifications/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/notifications/*}", + }, ] request, metadata = self._interceptor.pre_get_notification( request, metadata @@ -507,6 +511,10 @@ def __call__( "method": "get", "uri": "/v1/{parent=organizations/*/locations/*}/notifications", }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/notifications", + }, ] request, metadata = self._interceptor.pre_list_notifications( request, metadata diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py index 3ff314dd441b..296543a55cfb 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py @@ -123,7 +123,9 @@ class Notification(proto.Message): The resource name of the notification. Format: - organizations/{organization}/locations/{location}/notifications/{notification}. 
+ organizations/{organization}/locations/{location}/notifications/{notification} + or + projects/{project}/locations/{location}/notifications/{notification}. subject (google.cloud.advisorynotifications_v1.types.Subject): The subject line of the notification. messages (MutableSequence[google.cloud.advisorynotifications_v1.types.Message]): @@ -327,7 +329,8 @@ class ListNotificationsRequest(proto.Message): parent (str): Required. The parent, which owns this collection of notifications. Must be of the form - "organizations/{organization}/locations/{location}". + "organizations/{organization}/locations/{location}" + or "projects/{project}/locations/{location}". page_size (int): The maximum number of notifications to return. The service may return fewer than this @@ -418,7 +421,9 @@ class GetNotificationRequest(proto.Message): Required. A name of the notification to retrieve. Format: - organizations/{organization}/locations/{location}/notifications/{notification}. + organizations/{organization}/locations/{location}/notifications/{notification} + or + projects/{projects}/locations/{location}/notifications/{notification}. language_code (str): ISO code for requested localization language. If unset, will be interpereted as "en". 
If the diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json index 8d25b4a76abe..eb033ed94522 100644 --- a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-advisorynotifications", - "version": "0.3.2" + "version": "0.3.3" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index 3138b2f9a1b4..466426585550 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [0.3.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.5...google-cloud-alloydb-v0.3.6) (2024-01-04) + + +### Features + +* added instance network config ([bea1a52](https://github.com/googleapis/google-cloud-python/commit/bea1a52adf0717b7656764ac0f0f6f5fa13d0338)) +* added ListDatabases API and Database object ([bea1a52](https://github.com/googleapis/google-cloud-python/commit/bea1a52adf0717b7656764ac0f0f6f5fa13d0338)) +* added PSC config, PSC interface config, PSC instance config ([bea1a52](https://github.com/googleapis/google-cloud-python/commit/bea1a52adf0717b7656764ac0f0f6f5fa13d0338)) +* added two boolean fields satisfies_pzi and satisfies_pzs ([bea1a52](https://github.com/googleapis/google-cloud-python/commit/bea1a52adf0717b7656764ac0f0f6f5fa13d0338)) +* changed field network in NetworkConfig from required to optional ([bea1a52](https://github.com/googleapis/google-cloud-python/commit/bea1a52adf0717b7656764ac0f0f6f5fa13d0338)) + 
+ +### Documentation + +* clarified read pool config is for read pool type instances ([bea1a52](https://github.com/googleapis/google-cloud-python/commit/bea1a52adf0717b7656764ac0f0f6f5fa13d0338)) + ## [0.3.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.4...google-cloud-alloydb-v0.3.5) (2023-12-07) diff --git a/packages/google-cloud-alloydb/alloydb-v1alpha-py.tar.gz b/packages/google-cloud-alloydb/alloydb-v1alpha-py.tar.gz new file mode 100644 index 000000000000..af4487a07be9 Binary files /dev/null and b/packages/google-cloud-alloydb/alloydb-v1alpha-py.tar.gz differ diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index 288d10b11145..dca6dc837e67 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.3.6" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index 288d10b11145..dca6dc837e67 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.3.6" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py index 268bb67e2bda..fdf7171eeb17 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py @@ -29,6 +29,7 @@ ContinuousBackupConfig, ContinuousBackupInfo, ContinuousBackupSource, + Database, DatabaseVersion, EncryptionConfig, EncryptionInfo, @@ -69,6 +70,8 @@ ListBackupsResponse, ListClustersRequest, ListClustersResponse, + ListDatabasesRequest, + ListDatabasesResponse, ListInstancesRequest, ListInstancesResponse, ListSupportedDatabaseFlagsRequest, @@ -108,6 +111,7 @@ "CreateSecondaryClusterRequest", "CreateSecondaryInstanceRequest", "CreateUserRequest", + "Database", "DatabaseVersion", "DeleteBackupRequest", "DeleteClusterRequest", @@ -130,6 +134,8 @@ "ListBackupsResponse", "ListClustersRequest", "ListClustersResponse", + "ListDatabasesRequest", + "ListDatabasesResponse", "ListInstancesRequest", "ListInstancesResponse", "ListSupportedDatabaseFlagsRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json index 10300b9955c8..12158725f2f2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json @@ -115,6 +115,11 @@ "list_clusters" ] }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, "ListInstances": { "methods": [ "list_instances" @@ -275,6 +280,11 @@ "list_clusters" ] }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, "ListInstances": { "methods": [ "list_instances" @@ -435,6 +445,11 @@ "list_clusters" ] }, + "ListDatabases": { + 
"methods": [ + "list_databases" + ] + }, "ListInstances": { "methods": [ "list_instances" diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index 288d10b11145..dca6dc837e67 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.3.6" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py index 4e5c9634966a..644ea439a2ec 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -80,6 +80,8 @@ class AlloyDBAdminAsyncClient: parse_crypto_key_version_path = staticmethod( AlloyDBAdminClient.parse_crypto_key_version_path ) + database_path = staticmethod(AlloyDBAdminClient.database_path) + parse_database_path = staticmethod(AlloyDBAdminClient.parse_database_path) instance_path = staticmethod(AlloyDBAdminClient.instance_path) parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) network_path = staticmethod(AlloyDBAdminClient.network_path) @@ -4064,6 +4066,130 @@ async def sample_delete_user(): metadata=metadata, ) + async def list_databases( + self, + request: Optional[Union[service.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, 
str]] = (), + ) -> pagers.ListDatabasesAsyncPager: + r"""Lists Databases in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_list_databases(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.ListDatabasesRequest, dict]]): + The request object. Message for requesting list of + Databases. + parent (:class:`str`): + Required. Parent value for + ListDatabasesRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListDatabasesAsyncPager: + Message for response to listing + Databases. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListDatabasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_databases, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index 7f7a61cfd125..e4a0d06b7528 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -279,6 +279,30 @@ def parse_crypto_key_version_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def database_path( + project: str, + location: str, + cluster: str, + database: str, + ) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}".format( + project=project, + location=location, + cluster=cluster, + database=database, + ) + + @staticmethod + def parse_database_path(path: str) -> Dict[str, str]: + """Parses a database path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)/databases/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def instance_path( project: str, @@ -4343,6 +4367,121 @@ def sample_delete_user(): metadata=metadata, ) + def list_databases( + self, + request: Optional[Union[service.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesPager: + r"""Lists Databases in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_list_databases(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.ListDatabasesRequest, dict]): + The request object. Message for requesting list of + Databases. + parent (str): + Required. Parent value for + ListDatabasesRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListDatabasesPager: + Message for response to listing + Databases. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListDatabasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListDatabasesRequest): + request = service.ListDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "AlloyDBAdminClient": return self diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py index a384488fe721..5909c083f267 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/pagers.py @@ -665,3 +665,131 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatabasesPager: + """A pager for iterating through ``list_databases`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListDatabasesResponse], + request: service.ListDatabasesRequest, + response: service.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.alloydb_v1alpha.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Database]: + for page in self.pages: + yield from page.databases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatabasesAsyncPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.alloydb_v1alpha.types.ListDatabasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.alloydb_v1alpha.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListDatabasesResponse]], + request: service.ListDatabasesRequest, + response: service.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.alloydb_v1alpha.types.ListDatabasesRequest): + The initial request object. 
+ response (google.cloud.alloydb_v1alpha.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Database]: + async def async_generator(): + async for page in self.pages: + for response in page.databases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py index ba185713b6d5..9bd409617f19 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py @@ -381,6 +381,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_databases: gapic_v1.method.wrap_method( + self.list_databases, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), } def close(self): @@ -677,6 +691,15 @@ 
def delete_user( ]: raise NotImplementedError() + @property + def list_databases( + self, + ) -> Callable[ + [service.ListDatabasesRequest], + Union[service.ListDatabasesResponse, Awaitable[service.ListDatabasesResponse]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py index b892d971527a..552acc989015 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py @@ -1084,6 +1084,32 @@ def delete_user(self) -> Callable[[service.DeleteUserRequest], empty_pb2.Empty]: ) return self._stubs["delete_user"] + @property + def list_databases( + self, + ) -> Callable[[service.ListDatabasesRequest], service.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + Lists Databases in a given project and location. + + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_databases" not in self._stubs: + self._stubs["list_databases"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListDatabases", + request_serializer=service.ListDatabasesRequest.serialize, + response_deserializer=service.ListDatabasesResponse.deserialize, + ) + return self._stubs["list_databases"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py index 5e2c0db5521b..2855a1bf5991 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py @@ -1115,6 +1115,34 @@ def delete_user( ) return self._stubs["delete_user"] + @property + def list_databases( + self, + ) -> Callable[ + [service.ListDatabasesRequest], Awaitable[service.ListDatabasesResponse] + ]: + r"""Return a callable for the list databases method over gRPC. + + Lists Databases in a given project and location. + + Returns: + Callable[[~.ListDatabasesRequest], + Awaitable[~.ListDatabasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_databases" not in self._stubs: + self._stubs["list_databases"] = self.grpc_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/ListDatabases", + request_serializer=service.ListDatabasesRequest.serialize, + response_deserializer=service.ListDatabasesResponse.deserialize, + ) + return self._stubs["list_databases"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py index 7b232a2710da..bdfef34d066d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py @@ -239,6 +239,14 @@ def post_list_clusters(self, response): logging.log(f"Received response: {response}") return response + def pre_list_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_databases(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_instances(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -761,6 +769,27 @@ def post_list_clusters( """ return response + def pre_list_databases( + self, request: service.ListDatabasesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.ListDatabasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. 
+ """ + return request, metadata + + def post_list_databases( + self, response: service.ListDatabasesResponse + ) -> service.ListDatabasesResponse: + """Post-rpc interceptor for list_databases + + Override in a subclass to manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. + """ + return response + def pre_list_instances( self, request: service.ListInstancesRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[service.ListInstancesRequest, Sequence[Tuple[str, str]]]: @@ -3211,6 +3240,95 @@ def __call__( resp = self._interceptor.post_list_clusters(resp) return resp + class _ListDatabases(AlloyDBAdminRestStub): + def __hash__(self): + return hash("ListDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.ListDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.ListDatabasesResponse: + r"""Call the list databases method over HTTP. + + Args: + request (~.service.ListDatabasesRequest): + The request object. Message for requesting list of + Databases. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.ListDatabasesResponse: + Message for response to listing + Databases. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}/databases", + }, + ] + request, metadata = self._interceptor.pre_list_databases(request, metadata) + pb_request = service.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.ListDatabasesResponse() + pb_resp = service.ListDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_databases(resp) + return resp + class _ListInstances(AlloyDBAdminRestStub): def __hash__(self): return hash("ListInstances") @@ -4314,6 +4432,14 @@ def list_clusters( # In C++ this would require a dynamic_cast return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + @property + def list_databases( + self, + ) -> Callable[[service.ListDatabasesRequest], service.ListDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore + @property def list_instances( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py index 3b24cd81f761..a9314ac6676e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py @@ -23,6 +23,7 @@ ContinuousBackupConfig, ContinuousBackupInfo, ContinuousBackupSource, + Database, DatabaseVersion, EncryptionConfig, EncryptionInfo, @@ -63,6 +64,8 @@ ListBackupsResponse, ListClustersRequest, ListClustersResponse, + ListDatabasesRequest, + ListDatabasesResponse, ListInstancesRequest, ListInstancesResponse, ListSupportedDatabaseFlagsRequest, @@ -88,6 +91,7 @@ "ContinuousBackupConfig", "ContinuousBackupInfo", "ContinuousBackupSource", + "Database", "EncryptionConfig", "EncryptionInfo", "Instance", @@ -127,6 +131,8 @@ "ListBackupsResponse", "ListClustersRequest", "ListClustersResponse", + 
"ListDatabasesRequest", + "ListDatabasesResponse", "ListInstancesRequest", "ListInstancesResponse", "ListSupportedDatabaseFlagsRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index 47fbf1f21a66..6dccd47d271a 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -46,6 +46,7 @@ "Backup", "SupportedDatabaseFlag", "User", + "Database", }, ) @@ -749,8 +750,13 @@ class Cluster(proto.Message): primary_config (google.cloud.alloydb_v1alpha.types.Cluster.PrimaryConfig): Output only. Cross Region replication config specific to PRIMARY cluster. + satisfies_pzi (bool): + Output only. Reserved for future use. satisfies_pzs (bool): - Reserved for future use. + Output only. Reserved for future use. + psc_config (google.cloud.alloydb_v1alpha.types.Cluster.PscConfig): + Optional. The configuration for Private + Service Connect (PSC) for the cluster. """ class State(proto.Enum): @@ -825,7 +831,7 @@ class NetworkConfig(proto.Message): Attributes: network (str): - Required. The resource link for the VPC network in which + Optional. The resource link for the VPC network in which cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: @@ -838,8 +844,8 @@ class NetworkConfig(proto.Message): for this cluster will be created in the allocated range. The range name must comply with RFC 1035. Specifically, the name must be 1-63 characters long and match the regular - expression `a-z <[-a-z0-9]*[a-z0-9]>`__?. Field name is - intended to be consistent with CloudSQL. + expression ``[a-z]([-a-z0-9]*[a-z0-9])?``. Field name is + intended to be consistent with Cloud SQL. 
""" network: str = proto.Field( @@ -883,6 +889,22 @@ class PrimaryConfig(proto.Message): number=1, ) + class PscConfig(proto.Message): + r"""PscConfig contains PSC related configuration at a cluster + level. + + Attributes: + psc_enabled (bool): + Optional. Create an instance that allows + connections from Private Service Connect + endpoints to the instance. + """ + + psc_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + backup_source: "BackupSource" = proto.Field( proto.MESSAGE, number=15, @@ -1009,10 +1031,19 @@ class PrimaryConfig(proto.Message): number=23, message=PrimaryConfig, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=33, + ) satisfies_pzs: bool = proto.Field( proto.BOOL, number=30, ) + psc_config: PscConfig = proto.Field( + proto.MESSAGE, + number=31, + message=PscConfig, + ) class Instance(proto.Message): @@ -1102,7 +1133,8 @@ class Instance(proto.Message): query_insights_config (google.cloud.alloydb_v1alpha.types.Instance.QueryInsightsInstanceConfig): Configuration for query insights. read_pool_config (google.cloud.alloydb_v1alpha.types.Instance.ReadPoolConfig): - Read pool specific config. + Read pool instance configuration. This is required if the + value of instanceType is READ_POOL. ip_address (str): Output only. The IP address for the Instance. This is the connection endpoint for an end-user @@ -1133,8 +1165,16 @@ class Instance(proto.Message): client_connection_config (google.cloud.alloydb_v1alpha.types.Instance.ClientConnectionConfig): Optional. Client connection specific configurations + satisfies_pzi (bool): + Output only. Reserved for future use. satisfies_pzs (bool): - Reserved for future use. + Output only. Reserved for future use. + psc_instance_config (google.cloud.alloydb_v1alpha.types.Instance.PscInstanceConfig): + Optional. The configuration for Private + Service Connect (PSC) for the instance. + network_config (google.cloud.alloydb_v1alpha.types.Instance.InstanceNetworkConfig): + Optional. 
Instance level network + configuration. """ class State(proto.Enum): @@ -1397,6 +1437,141 @@ class ClientConnectionConfig(proto.Message): message="SslConfig", ) + class PscInterfaceConfig(proto.Message): + r"""Configuration for setting up a PSC interface. This + information needs to be provided by the customer. + PSC interfaces will be created and added to VMs via SLM (adding + a network interface will require recreating the VM). For HA + instances this will be done via LDTM. + + Attributes: + consumer_endpoint_ips (MutableSequence[str]): + A list of endpoints in the consumer VPC the + interface might initiate outbound connections + to. This list has to be provided when the PSC + interface is created. + network_attachment (str): + The NetworkAttachment resource created in the consumer VPC + to which the PSC interface will be linked, in the form of: + "projects/${CONSUMER_PROJECT}/regions/${REGION}/networkAttachments/${NETWORK_ATTACHMENT_NAME}". + NetworkAttachment has to be provided when the PSC interface + is created. + """ + + consumer_endpoint_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + network_attachment: str = proto.Field( + proto.STRING, + number=2, + ) + + class PscInstanceConfig(proto.Message): + r"""PscInstanceConfig contains PSC related configuration at an + instance level. + + Attributes: + service_attachment_link (str): + Output only. The service attachment created + when Private Service Connect (PSC) is enabled + for the instance. The name of the resource will + be in the format of + projects//regions//serviceAttachments/ + allowed_consumer_projects (MutableSequence[str]): + Optional. List of consumer projects that are + allowed to create PSC endpoints to + service-attachments to this instance. + allowed_consumer_networks (MutableSequence[str]): + Optional. List of consumer networks that are + allowed to create PSC endpoints to + service-attachments to this instance. 
+ psc_interface_configs (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance.PscInterfaceConfig]): + Optional. Configurations for setting up PSC + interfaces attached to the instance which are + used for outbound connectivity. Only primary + instances can have PSC interface attached. All + the VMs created for the primary instance will + share the same configurations. Currently we only + support 0 or 1 PSC interface. + outgoing_service_attachment_links (MutableSequence[str]): + Optional. List of service attachments that + this instance has created endpoints to connect + with. Currently, only a single outgoing service + attachment is supported per instance. + psc_enabled (bool): + Optional. Whether PSC connectivity is enabled + for this instance. This is populated by + referencing the value from the parent cluster. + """ + + service_attachment_link: str = proto.Field( + proto.STRING, + number=1, + ) + allowed_consumer_projects: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + allowed_consumer_networks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + psc_interface_configs: MutableSequence[ + "Instance.PscInterfaceConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Instance.PscInterfaceConfig", + ) + outgoing_service_attachment_links: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + psc_enabled: bool = proto.Field( + proto.BOOL, + number=6, + ) + + class InstanceNetworkConfig(proto.Message): + r"""Metadata related to instance level network configuration. + + Attributes: + authorized_external_networks (MutableSequence[google.cloud.alloydb_v1alpha.types.Instance.InstanceNetworkConfig.AuthorizedNetwork]): + Optional. A list of external network + authorized to access this instance. + enable_public_ip (bool): + Optional. Enabling public ip for the + instance. 
+ """ + + class AuthorizedNetwork(proto.Message): + r"""AuthorizedNetwork contains metadata for an authorized + network. + + Attributes: + cidr_range (str): + CIDR range for one authorzied network of the + instance. + """ + + cidr_range: str = proto.Field( + proto.STRING, + number=1, + ) + + authorized_external_networks: MutableSequence[ + "Instance.InstanceNetworkConfig.AuthorizedNetwork" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Instance.InstanceNetworkConfig.AuthorizedNetwork", + ) + enable_public_ip: bool = proto.Field( + proto.BOOL, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1505,10 +1680,24 @@ class ClientConnectionConfig(proto.Message): number=23, message=ClientConnectionConfig, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=30, + ) satisfies_pzs: bool = proto.Field( proto.BOOL, number=24, ) + psc_instance_config: PscInstanceConfig = proto.Field( + proto.MESSAGE, + number=28, + message=PscInstanceConfig, + ) + network_config: InstanceNetworkConfig = proto.Field( + proto.MESSAGE, + number=29, + message=InstanceNetworkConfig, + ) class ConnectionInfo(proto.Message): @@ -1525,6 +1714,10 @@ class ConnectionInfo(proto.Message): Instance. This is the default IP for the instance and is always created (even if enable_public_ip is set). This is the connection endpoint for an end-user application. + public_ip_address (str): + Output only. The public IP addresses for the Instance. This + is available ONLY when enable_public_ip is set. This is the + connection endpoint for an end-user application. pem_certificate_chain (MutableSequence[str]): Output only. The pem-encoded chain that may be used to verify the X.509 certificate. 
@@ -1542,6 +1735,10 @@ class ConnectionInfo(proto.Message): proto.STRING, number=2, ) + public_ip_address: str = proto.Field( + proto.STRING, + number=5, + ) pem_certificate_chain: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, @@ -1636,8 +1833,10 @@ class Backup(proto.Message): policy. Once the expiry quantity is over retention, the backup is eligible to be garbage collected. + satisfies_pzi (bool): + Output only. Reserved for future use. satisfies_pzs (bool): - Reserved for future use. + Output only. Reserved for future use. database_version (google.cloud.alloydb_v1alpha.types.DatabaseVersion): Output only. The database engine major version of the cluster this backup was created @@ -1815,6 +2014,10 @@ class QuantityBasedExpiry(proto.Message): number=20, message=QuantityBasedExpiry, ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=23, + ) satisfies_pzs: bool = proto.Field( proto.BOOL, number=21, @@ -2033,4 +2236,37 @@ class UserType(proto.Enum): ) +class Database(proto.Message): + r"""Message describing Database object. + + Attributes: + name (str): + Identifier. Name of the resource in the form + of + projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}. + charset (str): + Optional. Charset for the database. This field can contain + any PostgreSQL supported charset name. Example values + include "UTF8", "SQL_ASCII", etc. + collation (str): + Optional. Collation for the database. + Name of the custom or native collation for + postgres. 
Example values include "C", "POSIX", + etc + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + charset: str = proto.Field( + proto.STRING, + number=2, + ) + collation: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py index 9f9aa79a6e48..aea34ac6664e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py @@ -70,6 +70,8 @@ "CreateUserRequest", "UpdateUserRequest", "DeleteUserRequest", + "ListDatabasesRequest", + "ListDatabasesResponse", }, ) @@ -1684,8 +1686,8 @@ class GenerateClientCertificateRequest(proto.Message): exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). pem_csr (str): - Optional. A pem-encoded X.509 certificate - signing request (CSR). + Optional. A pem-encoded X.509 certificate signing request + (CSR). It is recommended to use public_key instead. cert_duration (google.protobuf.duration_pb2.Duration): Optional. An optional hint to the endpoint to generate the client certificate with the @@ -2136,4 +2138,74 @@ class DeleteUserRequest(proto.Message): ) +class ListDatabasesRequest(proto.Message): + r"""Message for requesting list of Databases. + + Attributes: + parent (str): + Required. Parent value for + ListDatabasesRequest. + page_size (int): + Optional. The maximum number of databases to + return. The service may return fewer than this + value. If unspecified, an appropriate number of + databases will be returned. The max value will + be 2000, values above max will be coerced to + max. + page_token (str): + Optional. A page token, received from a previous + ``ListDatabases`` call. This should be provided to retrieve + the subsequent page. 
This field is currently not supported, + its value will be ignored if passed. + filter (str): + Optional. Filtering results. + This field is currently not supported, its value + will be ignored if passed. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDatabasesResponse(proto.Message): + r"""Message for response to listing Databases. + + Attributes: + databases (MutableSequence[google.cloud.alloydb_v1alpha.types.Database]): + The list of databases + next_page_token (str): + A token identifying the next page of results + the server should return. If this field is + omitted, there are no subsequent pages. + """ + + @property + def raw_page(self): + return self + + databases: MutableSequence[resources.Database] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Database, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index 288d10b11145..dca6dc837e67 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.5" # {x-release-please-version} +__version__ = "0.3.6" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_databases_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_databases_async.py new file mode 100644 index 000000000000..46ed7e2259f4 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_databases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListDatabases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_list_databases(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListDatabases_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_databases_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_databases_sync.py new file mode 100644 index 000000000000..4abf1515e787 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_list_databases_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_ListDatabases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_list_databases(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_ListDatabases_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index ae14d8a9b2c2..2d469d1fcf8e 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.5" + "version": "0.3.6" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index ce190b99c4f2..c7387234c1b4 100644 --- 
a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.5" + "version": "0.3.6" }, "snippets": [ { @@ -3482,6 +3482,167 @@ ], "title": "alloydb_v1alpha_generated_alloy_db_admin_list_clusters_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.list_databases", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListDatabases", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListDatabasesAsyncPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_databases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListDatabases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_databases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.list_databases", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.ListDatabases", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListDatabasesPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "alloydb_v1alpha_generated_alloy_db_admin_list_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_ListDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_list_databases_sync.py" + }, { "canonical": true, "clientMethod": { diff --git 
a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index d5dd98f0416e..8082744e1b1e 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.5" + "version": "0.3.6" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py index 14b3ca7826ae..d6ed3ae54c7d 100644 --- a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py @@ -60,6 +60,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'inject_fault': ('fault_type', 'name', 'request_id', 'validate_only', ), 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_clusters': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_databases': ('parent', 'page_size', 'page_token', 'filter', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_supported_database_flags': ('parent', 'page_size', 'page_token', ), 'list_users': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index 96c6d5b2b991..2cf2c30907e5 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -1165,6 
+1165,7 @@ def test_get_cluster(request_type, transport: str = "grpc"): network="network_value", etag="etag_value", reconciling=True, + satisfies_pzi=True, satisfies_pzs=True, ) response = client.get_cluster(request) @@ -1185,6 +1186,7 @@ def test_get_cluster(request_type, transport: str = "grpc"): assert response.network == "network_value" assert response.etag == "etag_value" assert response.reconciling is True + assert response.satisfies_pzi is True assert response.satisfies_pzs is True @@ -1231,6 +1233,7 @@ async def test_get_cluster_async( network="network_value", etag="etag_value", reconciling=True, + satisfies_pzi=True, satisfies_pzs=True, ) ) @@ -1252,6 +1255,7 @@ async def test_get_cluster_async( assert response.network == "network_value" assert response.etag == "etag_value" assert response.reconciling is True + assert response.satisfies_pzi is True assert response.satisfies_pzs is True @@ -3230,6 +3234,7 @@ def test_get_instance(request_type, transport: str = "grpc"): ip_address="ip_address_value", reconciling=True, etag="etag_value", + satisfies_pzi=True, satisfies_pzs=True, ) response = client.get_instance(request) @@ -3251,6 +3256,7 @@ def test_get_instance(request_type, transport: str = "grpc"): assert response.ip_address == "ip_address_value" assert response.reconciling is True assert response.etag == "etag_value" + assert response.satisfies_pzi is True assert response.satisfies_pzs is True @@ -3298,6 +3304,7 @@ async def test_get_instance_async( ip_address="ip_address_value", reconciling=True, etag="etag_value", + satisfies_pzi=True, satisfies_pzs=True, ) ) @@ -3320,6 +3327,7 @@ async def test_get_instance_async( assert response.ip_address == "ip_address_value" assert response.reconciling is True assert response.etag == "etag_value" + assert response.satisfies_pzi is True assert response.satisfies_pzs is True @@ -5749,6 +5757,7 @@ def test_get_backup(request_type, transport: str = "grpc"): reconciling=True, etag="etag_value", size_bytes=1089, + 
satisfies_pzi=True, satisfies_pzs=True, database_version=resources.DatabaseVersion.POSTGRES_13, ) @@ -5772,6 +5781,7 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.satisfies_pzi is True assert response.satisfies_pzs is True assert response.database_version == resources.DatabaseVersion.POSTGRES_13 @@ -5821,6 +5831,7 @@ async def test_get_backup_async( reconciling=True, etag="etag_value", size_bytes=1089, + satisfies_pzi=True, satisfies_pzs=True, database_version=resources.DatabaseVersion.POSTGRES_13, ) @@ -5845,6 +5856,7 @@ async def test_get_backup_async( assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.satisfies_pzi is True assert response.satisfies_pzs is True assert response.database_version == resources.DatabaseVersion.POSTGRES_13 @@ -7426,6 +7438,7 @@ def test_get_connection_info(request_type, transport: str = "grpc"): call.return_value = resources.ConnectionInfo( name="name_value", ip_address="ip_address_value", + public_ip_address="public_ip_address_value", pem_certificate_chain=["pem_certificate_chain_value"], instance_uid="instance_uid_value", ) @@ -7440,6 +7453,7 @@ def test_get_connection_info(request_type, transport: str = "grpc"): assert isinstance(response, resources.ConnectionInfo) assert response.name == "name_value" assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" assert response.pem_certificate_chain == ["pem_certificate_chain_value"] assert response.instance_uid == "instance_uid_value" @@ -7484,6 +7498,7 @@ async def test_get_connection_info_async( resources.ConnectionInfo( name="name_value", ip_address="ip_address_value", + public_ip_address="public_ip_address_value", pem_certificate_chain=["pem_certificate_chain_value"], instance_uid="instance_uid_value", ) @@ 
-7499,6 +7514,7 @@ async def test_get_connection_info_async( assert isinstance(response, resources.ConnectionInfo) assert response.name == "name_value" assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" assert response.pem_certificate_chain == ["pem_certificate_chain_value"] assert response.instance_uid == "instance_uid_value" @@ -9055,6 +9071,428 @@ async def test_delete_user_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + service.ListDatabasesRequest, + dict, + ], +) +def test_list_databases(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_databases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListDatabasesRequest() + + +@pytest.mark.asyncio +async def test_list_databases_async( + transport: str = "grpc_asyncio", request_type=service.ListDatabasesRequest +): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = service.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListDatabasesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse() + ) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_databases_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_databases_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListDatabasesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListDatabasesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_databases( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_databases_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_pager(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_databases(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Database) for i in results) + + +def test_list_databases_pages(transport_name: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_databases), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Database) for i in responses) + + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = AlloyDBAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_databases(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -9062,46 +9500,407 @@ async def test_delete_user_flattened_error_async(): dict, ], ) -def test_list_clusters_rest(request_type): +def test_list_clusters_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", 
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListClustersResponse.to_json( + service.ListClustersResponse() + ) + + request = service.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=service.ListClustersRequest +): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + service.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + resources.Cluster(), + ], + next_page_token="abc", + ), + service.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + ], + next_page_token="ghi", + ), + service.ListClustersResponse( + clusters=[ + resources.Cluster(), + resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListClustersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Cluster( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Cluster.State.READY, + cluster_type=resources.Cluster.ClusterType.PRIMARY, + database_version=resources.DatabaseVersion.POSTGRES_13, + network="network_value", + etag="etag_value", + reconciling=True, + satisfies_pzi=True, + satisfies_pzs=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) + return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.get_cluster(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Cluster) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Cluster.State.READY + assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.network == "network_value" + assert response.etag == "etag_value" + assert response.reconciling is True + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True -def test_list_clusters_rest_required_fields(request_type=service.ListClustersRequest): +def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9116,30 +9915,23 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).get_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).get_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("view",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9148,7 +9940,7 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListClustersResponse() + return_value = resources.Cluster() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9169,40 +9961,30 @@ def test_list_clusters_rest_required_fields(request_type=service.ListClustersReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) + return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.get_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_clusters_rest_unset_required_fields(): +def test_get_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_clusters._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - 
"orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_clusters_rest_interceptors(null_interceptor): +def test_get_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9215,13 +9997,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_clusters" + transports.AlloyDBAdminRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" + transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) + pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9232,19 +10014,17 @@ def test_list_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListClustersResponse.to_json( - service.ListClustersResponse() - ) + req.return_value._content = resources.Cluster.to_json(resources.Cluster()) - request = service.ListClustersRequest() + request = service.GetClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListClustersResponse() + post.return_value = resources.Cluster() - client.list_clusters( + client.get_cluster( request, metadata=[ ("key", "val"), @@ -9256,8 +10036,8 @@ def 
test_list_clusters_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_clusters_rest_bad_request( - transport: str = "rest", request_type=service.ListClustersRequest +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=service.GetClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9265,7 +10045,7 @@ def test_list_clusters_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9277,10 +10057,10 @@ def test_list_clusters_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_clusters(request) + client.get_cluster(request) -def test_list_clusters_rest_flattened(): +def test_get_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9289,14 +10069,14 @@ def test_list_clusters_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListClustersResponse() + return_value = resources.Cluster() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -9304,25 +10084,25 @@ def test_list_clusters_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListClustersResponse.pb(return_value) + return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_clusters(**mock_args) + client.get_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/clusters" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_list_clusters_rest_flattened_error(transport: str = "rest"): +def test_get_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9331,136 +10111,200 @@ def test_list_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_clusters( - service.ListClustersRequest(), - parent="parent_value", + client.get_cluster( + service.GetClusterRequest(), + name="name_value", ) -def test_list_clusters_rest_pager(transport: str = "rest"): +def test_get_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - resources.Cluster(), - resources.Cluster(), - ], - next_page_token="abc", - ), - service.ListClustersResponse( - clusters=[], - next_page_token="def", - ), - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - ], - next_page_token="ghi", - ), - service.ListClustersResponse( - clusters=[ - resources.Cluster(), - resources.Cluster(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + 
"update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, + "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + "earliest_restorable_time": {}, + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + "satisfies_pzi": True, + "satisfies_pzs": True, + "psc_config": {"psc_enabled": True}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateClusterRequest.meta.fields["cluster"] - # Wrap the values into proper Response objs - response = tuple(service.ListClustersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - sample_request = {"parent": "projects/sample1/locations/sample2"} + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - pager = client.list_clusters(request=sample_request) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Cluster) for i in results) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - pages = list(client.list_clusters(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO 
COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - service.GetClusterRequest, - dict, - ], -) -def test_get_cluster_rest(request_type): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Cluster( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Cluster.State.READY, - cluster_type=resources.Cluster.ClusterType.PRIMARY, - database_version=resources.DatabaseVersion.POSTGRES_13, - network="network_value", - etag="etag_value", - reconciling=True, - satisfies_pzs=True, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.create_cluster(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Cluster) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Cluster.State.READY - assert response.cluster_type == resources.Cluster.ClusterType.PRIMARY - assert response.database_version == resources.DatabaseVersion.POSTGRES_13 - assert response.network == "network_value" - assert response.etag == "etag_value" - assert response.reconciling is True - assert response.satisfies_pzs is True + assert response.operation.name == "operations/spam" -def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest): +def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9472,26 +10316,38 @@ def 
test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest ) # verify fields with default values are dropped + assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).create_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view",)) + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9500,7 +10356,7 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9512,39 +10368,58 @@ def test_get_cluster_rest_required_fields(request_type=service.GetClusterRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.create_cluster(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_cluster_rest_unset_required_fields(): +def test_create_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cluster_rest_interceptors(null_interceptor): +def 
test_create_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9557,13 +10432,15 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_get_cluster" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) + pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9574,17 +10451,19 @@ def test_get_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Cluster.to_json(resources.Cluster()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetClusterRequest() + request = service.CreateClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Cluster() + post.return_value = operations_pb2.Operation() - client.get_cluster( + client.create_cluster( request, metadata=[ ("key", "val"), @@ -9596,8 +10475,8 @@ def test_get_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_cluster_rest_bad_request( - transport: str = "rest", request_type=service.GetClusterRequest +def test_create_cluster_rest_bad_request( + transport: str = "rest", 
request_type=service.CreateClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9605,7 +10484,7 @@ def test_get_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9617,10 +10496,10 @@ def test_get_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_cluster(request) + client.create_cluster(request) -def test_get_cluster_rest_flattened(): +def test_create_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9629,40 +10508,42 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_cluster(**mock_args) + client.create_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/clusters" % client.transport._host, args[1], ) -def test_get_cluster_rest_flattened_error(transport: str = "rest"): +def test_create_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9671,13 +10552,17 @@ def test_get_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_cluster( - service.GetClusterRequest(), - name="name_value", + client.create_cluster( + service.CreateClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", ) -def test_get_cluster_rest_error(): +def test_create_cluster_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9686,18 +10571,20 @@ def test_get_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateClusterRequest, + service.UpdateClusterRequest, dict, ], ) -def test_create_cluster_rest(request_type): +def test_update_cluster_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } request_init["cluster"] = { "backup_source": { "backup_uid": "backup_uid_value", @@ -9708,7 +10595,7 @@ def test_create_cluster_rest(request_type): "reference_id": "reference_id_value", "source_type": 1, }, - "name": "name_value", + "name": "projects/sample1/locations/sample2/clusters/sample3", "display_name": "display_name_value", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, @@ -9768,14 +10655,16 @@ def test_create_cluster_rest(request_type): "secondary_cluster_names_value2", ] }, + "satisfies_pzi": True, "satisfies_pzs": True, + "psc_config": {"psc_enabled": True}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateClusterRequest.meta.fields["cluster"] + test_field = service.UpdateClusterRequest.meta.fields["cluster"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -9851,18 +10740,16 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.update_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_cluster_rest_required_fields(request_type=service.CreateClusterRequest): +def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9874,38 +10761,29 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == request_init["cluster_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).update_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "cluster_id", + "allow_missing", "request_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9926,7 +10804,7 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -9939,45 +10817,34 @@ def test_create_cluster_rest_required_fields(request_type=service.CreateClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.update_cluster(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_cluster_rest_unset_required_fields(): +def test_update_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_cluster._get_unset_required_fields({}) + unset_fields = transport.update_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "clusterId", + 
"allowMissing", "requestId", + "updateMask", "validateOnly", ) ) - & set( - ( - "parent", - "clusterId", - "cluster", - ) - ) + & set(("cluster",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_cluster_rest_interceptors(null_interceptor): +def test_update_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9992,13 +10859,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_create_cluster" + transports.AlloyDBAdminRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) + pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10013,7 +10880,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateClusterRequest() + request = service.UpdateClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -10021,7 +10888,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_cluster( + client.update_cluster( request, metadata=[ ("key", "val"), @@ -10033,8 +10900,8 @@ def test_create_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_cluster_rest_bad_request( - transport: str = "rest", request_type=service.CreateClusterRequest +def test_update_cluster_rest_bad_request( + transport: str 
= "rest", request_type=service.UpdateClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10042,7 +10909,9 @@ def test_create_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10054,10 +10923,10 @@ def test_create_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_cluster(request) + client.update_cluster(request) -def test_create_cluster_rest_flattened(): +def test_update_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10069,219 +10938,78 @@ def test_create_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", cluster=resources.Cluster( backup_source=resources.BackupSource(backup_uid="backup_uid_value") ), - cluster_id="cluster_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_cluster(**mock_args) - - # Establish that the underlying call was made with the 
expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/clusters" - % client.transport._host, - args[1], - ) - - -def test_create_cluster_rest_flattened_error(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_cluster( - service.CreateClusterRequest(), - parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", - ) - - -def test_create_cluster_rest_error(): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - service.UpdateClusterRequest, - dict, - ], -) -def test_update_cluster_rest(request_type): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } - request_init["cluster"] = { - "backup_source": { - "backup_uid": "backup_uid_value", - "backup_name": "backup_name_value", - }, - "migration_source": { - "host_port": "host_port_value", - "reference_id": "reference_id_value", - "source_type": 1, - }, - "name": "projects/sample1/locations/sample2/clusters/sample3", - "display_name": "display_name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "labels": {}, - "state": 1, - "cluster_type": 1, - "database_version": 1, - "network_config": { - "network": "network_value", - "allocated_ip_range": "allocated_ip_range_value", - }, - 
"network": "network_value", - "etag": "etag_value", - "annotations": {}, - "reconciling": True, - "initial_user": {"user": "user_value", "password": "password_value"}, - "automated_backup_policy": { - "weekly_schedule": { - "start_times": [ - {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} - ], - "days_of_week": [1], - }, - "time_based_retention": { - "retention_period": {"seconds": 751, "nanos": 543} - }, - "quantity_based_retention": {"count": 553}, - "enabled": True, - "backup_window": {}, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "location": "location_value", - "labels": {}, - }, - "ssl_config": {"ssl_mode": 1, "ca_source": 1}, - "encryption_config": {}, - "encryption_info": { - "encryption_type": 1, - "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], - }, - "continuous_backup_config": { - "enabled": True, - "recovery_window_days": 2166, - "encryption_config": {}, - }, - "continuous_backup_info": { - "encryption_info": {}, - "enabled_time": {}, - "schedule": [1], - "earliest_restorable_time": {}, - }, - "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, - "primary_config": { - "secondary_cluster_names": [ - "secondary_cluster_names_value1", - "secondary_cluster_names_value2", - ] - }, - "satisfies_pzs": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateClusterRequest.meta.fields["cluster"] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + client.update_cluster(**mock_args) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{cluster.name=projects/*/locations/*/clusters/*}" + % client.transport._host, + args[1], + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - subfields_not_in_runtime = [] + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_cluster( + service.UpdateClusterRequest(), + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +def test_update_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = AlloyDBAdminClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -10296,16 +11024,17 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterRequest): +def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10320,26 +11049,30 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).delete_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "etag", + "force", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10360,10 +11093,9 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -10373,34 +11105,34 @@ def test_update_cluster_rest_required_fields(request_type=service.UpdateClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.delete_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_cluster_rest_unset_required_fields(): +def test_delete_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_cluster._get_unset_required_fields({}) + unset_fields = transport.delete_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "etag", + "force", "requestId", - "updateMask", "validateOnly", ) ) - & set(("cluster",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): +def test_delete_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10415,13 +11147,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_update_cluster" + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) + pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10436,7 +11168,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateClusterRequest() + request = service.DeleteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -10444,7 +11176,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_cluster( + client.delete_cluster( request, metadata=[ ("key", "val"), @@ -10456,8 +11188,8 @@ def test_update_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_cluster_rest_bad_request( - transport: str = "rest", request_type=service.UpdateClusterRequest +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=service.DeleteClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10465,9 +11197,7 @@ def test_update_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + request_init = {"name": 
"projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10479,10 +11209,10 @@ def test_update_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_cluster(request) + client.delete_cluster(request) -def test_update_cluster_rest_flattened(): +def test_delete_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10494,16 +11224,11 @@ def test_update_cluster_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "cluster": {"name": "projects/sample1/locations/sample2/clusters/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} # get truthy value for each flattened field mock_args = dict( - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -10514,20 +11239,20 @@ def test_update_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_cluster(**mock_args) + client.delete_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{cluster.name=projects/*/locations/*/clusters/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" % client.transport._host, args[1], ) -def test_update_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10536,16 +11261,13 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_cluster( - service.UpdateClusterRequest(), - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_cluster( + service.DeleteClusterRequest(), + name="name_value", ) -def test_update_cluster_rest_error(): +def test_delete_cluster_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10554,11 +11276,11 @@ def test_update_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteClusterRequest, + service.PromoteClusterRequest, dict, ], ) -def test_delete_cluster_rest(request_type): +def test_promote_cluster_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10580,13 +11302,15 @@ def test_delete_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.promote_cluster(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterRequest): +def test_promote_cluster_rest_required_fields( + request_type=service.PromoteClusterRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} @@ -10605,7 +11329,7 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).promote_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -10614,16 +11338,7 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "etag", - "force", - "request_id", - "validate_only", - ) - ) + ).promote_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10649,9 +11364,10 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -10661,34 +11377,24 @@ def test_delete_cluster_rest_required_fields(request_type=service.DeleteClusterR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.promote_cluster(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_cluster_rest_unset_required_fields(): +def test_promote_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "etag", - "force", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.promote_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): +def test_promote_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10703,13 +11409,13 @@ def 
test_delete_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) + pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10724,7 +11430,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.DeleteClusterRequest() + request = service.PromoteClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -10732,7 +11438,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_cluster( + client.promote_cluster( request, metadata=[ ("key", "val"), @@ -10744,8 +11450,8 @@ def test_delete_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_cluster_rest_bad_request( - transport: str = "rest", request_type=service.DeleteClusterRequest +def test_promote_cluster_rest_bad_request( + transport: str = "rest", request_type=service.PromoteClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10765,10 +11471,10 @@ def test_delete_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_cluster(request) + client.promote_cluster(request) -def test_delete_cluster_rest_flattened(): +def test_promote_cluster_rest_flattened(): client = 
AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10795,20 +11501,20 @@ def test_delete_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_cluster(**mock_args) + client.promote_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:promote" % client.transport._host, args[1], ) -def test_delete_cluster_rest_flattened_error(transport: str = "rest"): +def test_promote_cluster_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10817,13 +11523,13 @@ def test_delete_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_cluster( - service.DeleteClusterRequest(), + client.promote_cluster( + service.PromoteClusterRequest(), name="name_value", ) -def test_delete_cluster_rest_error(): +def test_promote_cluster_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10832,18 +11538,18 @@ def test_delete_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.PromoteClusterRequest, + service.RestoreClusterRequest, dict, ], ) -def test_promote_cluster_rest(request_type): +def test_restore_cluster_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -10858,19 +11564,20 @@ def test_promote_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.promote_cluster(request) + response = client.restore_cluster(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_promote_cluster_rest_required_fields( - request_type=service.PromoteClusterRequest, +def test_restore_cluster_rest_required_fields( + request_type=service.RestoreClusterRequest, ): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10885,21 +11592,24 @@ def test_promote_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).promote_cluster._get_unset_required_fields(jsonified_request) + ).restore_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).promote_cluster._get_unset_required_fields(jsonified_request) + ).restore_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10933,24 +11643,33 @@ def test_promote_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.promote_cluster(request) + response = client.restore_cluster(request) 
expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_promote_cluster_rest_unset_required_fields(): +def test_restore_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.promote_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.restore_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_promote_cluster_rest_interceptors(null_interceptor): +def test_restore_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10965,13 +11684,13 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) + pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10986,7 +11705,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.PromoteClusterRequest() + request = service.RestoreClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ 
-10994,7 +11713,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.promote_cluster( + client.restore_cluster( request, metadata=[ ("key", "val"), @@ -11006,8 +11725,8 @@ def test_promote_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_promote_cluster_rest_bad_request( - transport: str = "rest", request_type=service.PromoteClusterRequest +def test_restore_cluster_rest_bad_request( + transport: str = "rest", request_type=service.RestoreClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11015,7 +11734,7 @@ def test_promote_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11027,65 +11746,10 @@ def test_promote_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.promote_cluster(request) - - -def test_promote_cluster_rest_flattened(): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/clusters/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.promote_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*}:promote" - % client.transport._host, - args[1], - ) - - -def test_promote_cluster_rest_flattened_error(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.promote_cluster( - service.PromoteClusterRequest(), - name="name_value", - ) + client.restore_cluster(request) -def test_promote_cluster_rest_error(): +def test_restore_cluster_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11094,18 +11758,159 @@ def test_promote_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.RestoreClusterRequest, + service.CreateSecondaryClusterRequest, dict, ], ) -def test_restore_cluster_rest(request_type): +def test_create_secondary_cluster_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cluster"] = { + "backup_source": { + "backup_uid": "backup_uid_value", + "backup_name": "backup_name_value", + }, + "migration_source": { + "host_port": "host_port_value", + "reference_id": "reference_id_value", + "source_type": 1, + }, + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "cluster_type": 1, + "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, + "network": "network_value", + "etag": "etag_value", + "annotations": {}, + "reconciling": True, + "initial_user": {"user": "user_value", "password": "password_value"}, + "automated_backup_policy": { + "weekly_schedule": { + "start_times": [ + {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} + ], + "days_of_week": [1], + }, + "time_based_retention": { + "retention_period": {"seconds": 751, "nanos": 543} + }, 
+ "quantity_based_retention": {"count": 553}, + "enabled": True, + "backup_window": {}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "location": "location_value", + "labels": {}, + }, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + "encryption_config": {}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "continuous_backup_config": { + "enabled": True, + "recovery_window_days": 2166, + "encryption_config": {}, + }, + "continuous_backup_info": { + "encryption_info": {}, + "enabled_time": {}, + "schedule": [1], + "earliest_restorable_time": {}, + }, + "secondary_config": {"primary_cluster_name": "primary_cluster_name_value"}, + "primary_config": { + "secondary_cluster_names": [ + "secondary_cluster_names_value1", + "secondary_cluster_names_value2", + ] + }, + "satisfies_pzi": True, + "satisfies_pzs": True, + "psc_config": {"psc_enabled": True}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateSecondaryClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del 
request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -11120,14 +11925,14 @@ def test_restore_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restore_cluster(request) + response = client.create_secondary_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_restore_cluster_rest_required_fields( - request_type=service.RestoreClusterRequest, +def test_create_secondary_cluster_rest_required_fields( + request_type=service.CreateSecondaryClusterRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -11145,20 +11950,31 @@ def test_restore_cluster_rest_required_fields( ) # verify fields with default values are dropped + assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_cluster._get_unset_required_fields(jsonified_request) + ).create_secondary_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] jsonified_request["parent"] = "parent_value" jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_cluster._get_unset_required_fields(jsonified_request) + ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11199,21 +12015,33 @@ def test_restore_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restore_cluster(request) + response = client.create_secondary_cluster(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_restore_cluster_rest_unset_required_fields(): +def test_create_secondary_cluster_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.restore_cluster._get_unset_required_fields({}) + unset_fields = transport.create_secondary_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) & set( ( "parent", @@ -11225,7 +12053,7 @@ def test_restore_cluster_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_cluster_rest_interceptors(null_interceptor): +def test_create_secondary_cluster_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11240,13 +12068,15 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" + transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" ) as post, mock.patch.object( - 
transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) + pb_message = service.CreateSecondaryClusterRequest.pb( + service.CreateSecondaryClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11261,7 +12091,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.RestoreClusterRequest() + request = service.CreateSecondaryClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -11269,7 +12099,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.restore_cluster( + client.create_secondary_cluster( request, metadata=[ ("key", "val"), @@ -11281,8 +12111,8 @@ def test_restore_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_restore_cluster_rest_bad_request( - transport: str = "rest", request_type=service.RestoreClusterRequest +def test_create_secondary_cluster_rest_bad_request( + transport: str = "rest", request_type=service.CreateSecondaryClusterRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11302,197 +12132,125 @@ def test_restore_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.restore_cluster(request) - - -def test_restore_cluster_rest_error(): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + client.create_secondary_cluster(request) -@pytest.mark.parametrize( - "request_type", - [ - service.CreateSecondaryClusterRequest, - dict, - ], -) -def test_create_secondary_cluster_rest(request_type): +def 
test_create_secondary_cluster_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["cluster"] = { - "backup_source": { - "backup_uid": "backup_uid_value", - "backup_name": "backup_name_value", - }, - "migration_source": { - "host_port": "host_port_value", - "reference_id": "reference_id_value", - "source_type": 1, - }, - "name": "name_value", - "display_name": "display_name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "labels": {}, - "state": 1, - "cluster_type": 1, - "database_version": 1, - "network_config": { - "network": "network_value", - "allocated_ip_range": "allocated_ip_range_value", - }, - "network": "network_value", - "etag": "etag_value", - "annotations": {}, - "reconciling": True, - "initial_user": {"user": "user_value", "password": "password_value"}, - "automated_backup_policy": { - "weekly_schedule": { - "start_times": [ - {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543} - ], - "days_of_week": [1], - }, - "time_based_retention": { - "retention_period": {"seconds": 751, "nanos": 543} - }, - "quantity_based_retention": {"count": 553}, - "enabled": True, - "backup_window": {}, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "location": "location_value", - "labels": {}, - }, - "ssl_config": {"ssl_mode": 1, "ca_source": 1}, - "encryption_config": {}, - "encryption_info": { - "encryption_type": 1, - "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], - }, - "continuous_backup_config": { - "enabled": True, - "recovery_window_days": 2166, - "encryption_config": {}, - }, - "continuous_backup_info": { - "encryption_info": {}, - "enabled_time": {}, - "schedule": [1], - "earliest_restorable_time": {}, - }, - "secondary_config": 
{"primary_cluster_name": "primary_cluster_name_value"}, - "primary_config": { - "secondary_cluster_names": [ - "secondary_cluster_names_value1", - "secondary_cluster_names_value2", - ] - }, - "satisfies_pzs": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateSecondaryClusterRequest.meta.fields["cluster"] + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + client.create_secondary_cluster(**mock_args) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/clusters:createsecondary" + % client.transport._host, + args[1], + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_create_secondary_cluster_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_secondary_cluster( + service.CreateSecondaryClusterRequest(), + parent="parent_value", + cluster=resources.Cluster( + backup_source=resources.BackupSource(backup_uid="backup_uid_value") + ), + cluster_id="cluster_id_value", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] +def test_create_secondary_cluster_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_secondary_cluster(request) + response = client.list_instances(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_secondary_cluster_rest_required_fields( - request_type=service.CreateSecondaryClusterRequest, -): +def test_list_instances_rest_required_fields(request_type=service.ListInstancesRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} request_init["parent"] = "" - request_init["cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11504,29 +12262,26 @@ def test_create_secondary_cluster_rest_required_fields( ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == 
request_init["cluster_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_secondary_cluster._get_unset_required_fields(jsonified_request) + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "cluster_id", - "request_id", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -11534,8 +12289,6 @@ def test_create_secondary_cluster_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11544,7 +12297,7 @@ def test_create_secondary_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListInstancesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11556,58 +12309,49 @@ def test_create_secondary_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_secondary_cluster(request) + response = client.list_instances(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_secondary_cluster_rest_unset_required_fields(): +def test_list_instances_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_secondary_cluster._get_unset_required_fields({}) + unset_fields = transport.list_instances._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "clusterId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "clusterId", - "cluster", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_secondary_cluster_rest_interceptors(null_interceptor): +def test_list_instances_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -11620,17 +12364,13 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" + transports.AlloyDBAdminRestInterceptor, "post_list_instances" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" + transports.AlloyDBAdminRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateSecondaryClusterRequest.pb( - service.CreateSecondaryClusterRequest() - ) + pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11641,19 +12381,19 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListInstancesResponse.to_json( + service.ListInstancesResponse() ) - request = service.CreateSecondaryClusterRequest() + request = service.ListInstancesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListInstancesResponse() - client.create_secondary_cluster( + client.list_instances( request, metadata=[ ("key", "val"), @@ -11665,8 +12405,8 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_secondary_cluster_rest_bad_request( - transport: str = "rest", request_type=service.CreateSecondaryClusterRequest +def test_list_instances_rest_bad_request( + transport: str = "rest", 
request_type=service.ListInstancesRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11674,7 +12414,7 @@ def test_create_secondary_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11686,10 +12426,10 @@ def test_create_secondary_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_secondary_cluster(request) + client.list_instances(request) -def test_create_secondary_cluster_rest_flattened(): +def test_list_instances_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11698,42 +12438,42 @@ def test_create_secondary_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_secondary_cluster(**mock_args) + client.list_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/clusters:createsecondary" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances" % client.transport._host, args[1], ) -def test_create_secondary_cluster_rest_flattened_error(transport: str = "rest"): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11742,69 +12482,144 @@ def test_create_secondary_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_secondary_cluster( - service.CreateSecondaryClusterRequest(), + client.list_instances( + service.ListInstancesRequest(), parent="parent_value", - cluster=resources.Cluster( - backup_source=resources.BackupSource(backup_uid="backup_uid_value") - ), - cluster_id="cluster_id_value", ) -def test_create_secondary_cluster_rest_error(): +def test_list_instances_rest_pager(transport: str = "rest"): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + resources.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + ], + next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[ + resources.Instance(), + resources.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_instances(request=sample_request) + + results = list(pager) + 
assert len(results) == 6 + assert all(isinstance(i, resources.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListInstancesRequest, + service.GetInstanceRequest, dict, ], ) -def test_list_instances_rest(request_type): +def test_get_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Instance( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Instance.State.READY, + instance_type=resources.Instance.InstanceType.PRIMARY, + availability_type=resources.Instance.AvailabilityType.ZONAL, + gce_zone="gce_zone_value", + ip_address="ip_address_value", + reconciling=True, + etag="etag_value", + satisfies_pzi=True, + satisfies_pzs=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListInstancesResponse.pb(return_value) + return_value = resources.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_instances(request) + response = client.get_instance(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Instance.State.READY + assert response.instance_type == resources.Instance.InstanceType.PRIMARY + assert response.availability_type == resources.Instance.AvailabilityType.ZONAL + assert response.gce_zone == "gce_zone_value" + assert response.ip_address == "ip_address_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True -def test_list_instances_rest_required_fields(request_type=service.ListInstancesRequest): +def test_get_instance_rest_required_fields(request_type=service.GetInstanceRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11819,30 +12634,23 @@ def test_list_instances_rest_required_fields(request_type=service.ListInstancesR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) + ).get_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("view",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11851,7 +12659,7 @@ def test_list_instances_rest_required_fields(request_type=service.ListInstancesR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListInstancesResponse() + return_value = resources.Instance() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11872,40 +12680,30 @@ def test_list_instances_rest_required_fields(request_type=service.ListInstancesR response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListInstancesResponse.pb(return_value) + return_value = resources.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_instances(request) + response = client.get_instance(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_instances_rest_unset_required_fields(): +def test_get_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_instances._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): +def test_get_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11918,13 +12716,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_instances" + transports.AlloyDBAdminRestInterceptor, "post_get_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_instances" + transports.AlloyDBAdminRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) + pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11935,19 +12733,17 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListInstancesResponse.to_json( - service.ListInstancesResponse() - ) + req.return_value._content = resources.Instance.to_json(resources.Instance()) - request = service.ListInstancesRequest() + request = service.GetInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListInstancesResponse() + post.return_value = resources.Instance() - client.list_instances( + client.get_instance( 
request, metadata=[ ("key", "val"), @@ -11959,8 +12755,8 @@ def test_list_instances_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_instances_rest_bad_request( - transport: str = "rest", request_type=service.ListInstancesRequest +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=service.GetInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11968,7 +12764,9 @@ def test_list_instances_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11980,10 +12778,10 @@ def test_list_instances_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_instances(request) + client.get_instance(request) -def test_list_instances_rest_flattened(): +def test_get_instance_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11992,16 +12790,16 @@ def test_list_instances_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListInstancesResponse() + return_value = resources.Instance() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -12009,25 +12807,25 @@ def test_list_instances_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListInstancesResponse.pb(return_value) + return_value = resources.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_instances(**mock_args) + client.get_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}" % client.transport._host, args[1], ) -def test_list_instances_rest_flattened_error(transport: str = "rest"): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12036,142 +12834,197 @@ def test_list_instances_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_instances( - service.ListInstancesRequest(), - parent="parent_value", + client.get_instance( + service.GetInstanceRequest(), + name="name_value", ) -def test_list_instances_rest_pager(transport: str = "rest"): +def test_get_instance_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - resources.Instance(), - ], - next_page_token="abc", - ), - service.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - ], - next_page_token="ghi", - ), - service.ListInstancesResponse( - instances=[ - resources.Instance(), - resources.Instance(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + 
"database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, + "satisfies_pzi": True, + "satisfies_pzs": True, + "psc_instance_config": { + "service_attachment_link": "service_attachment_link_value", + "allowed_consumer_projects": [ + "allowed_consumer_projects_value1", + "allowed_consumer_projects_value2", + ], + "allowed_consumer_networks": [ + "allowed_consumer_networks_value1", + "allowed_consumer_networks_value2", + ], + "psc_interface_configs": [ + { + "consumer_endpoint_ips": [ + "consumer_endpoint_ips_value1", + "consumer_endpoint_ips_value2", + ], + "network_attachment": "network_attachment_value", + } + ], + "outgoing_service_attachment_links": [ + "outgoing_service_attachment_links_value1", + "outgoing_service_attachment_links_value2", + ], + "psc_enabled": True, + }, + "network_config": { + "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], + "enable_public_ip": True, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the values into proper Response objs - response = tuple(service.ListInstancesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateInstanceRequest.meta.fields["instance"] - sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" - } + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - pager = client.list_instances(request=sample_request) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Instance) for i in results) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at 
runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - service.GetInstanceRequest, - dict, - ], -) -def test_get_instance_rest(request_type): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Instance( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Instance.State.READY, - instance_type=resources.Instance.InstanceType.PRIMARY, - availability_type=resources.Instance.AvailabilityType.ZONAL, - gce_zone="gce_zone_value", - ip_address="ip_address_value", - reconciling=True, - etag="etag_value", - satisfies_pzs=True, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_instance(request) + response = client.create_instance(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Instance.State.READY - assert response.instance_type == resources.Instance.InstanceType.PRIMARY - assert response.availability_type == resources.Instance.AvailabilityType.ZONAL - assert response.gce_zone == "gce_zone_value" - assert response.ip_address == "ip_address_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.satisfies_pzs is True + assert response.operation.name == "operations/spam" -def test_get_instance_rest_required_fields(request_type=service.GetInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=service.CreateInstanceRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["instance_id"] = "" request = 
request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12183,26 +13036,38 @@ def test_get_instance_rest_required_fields(request_type=service.GetInstanceReque ) # verify fields with default values are dropped + assert "instanceId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) + ).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view",)) + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12211,7 +13076,7 @@ def test_get_instance_rest_required_fields(request_type=service.GetInstanceReque request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.Instance() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12223,39 +13088,58 @@ def test_get_instance_rest_required_fields(request_type=service.GetInstanceReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_instance(request) + response = client.create_instance(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_instance_rest_unset_required_fields(): +def test_create_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): +def 
test_create_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12268,13 +13152,15 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_get_instance" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_get_instance" + transports.AlloyDBAdminRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) + pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12285,17 +13171,19 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Instance.to_json(resources.Instance()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetInstanceRequest() + request = service.CreateInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Instance() + post.return_value = operations_pb2.Operation() - client.get_instance( + client.create_instance( request, metadata=[ ("key", "val"), @@ -12307,8 +13195,8 @@ def test_get_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_instance_rest_bad_request( - transport: str = "rest", request_type=service.GetInstanceRequest +def test_create_instance_rest_bad_request( + transport: str = "rest", 
request_type=service.CreateInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12316,9 +13204,7 @@ def test_get_instance_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12330,10 +13216,10 @@ def test_get_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_instance(request) + client.create_instance(request) -def test_get_instance_rest_flattened(): +def test_create_instance_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12342,42 +13228,42 @@ def test_get_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Instance() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + "parent": "projects/sample1/locations/sample2/clusters/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_instance(**mock_args) + client.create_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances" % client.transport._host, args[1], ) -def test_get_instance_rest_flattened_error(transport: str = "rest"): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12386,13 +13272,15 @@ def test_get_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_instance( - service.GetInstanceRequest(), - name="name_value", + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=resources.Instance(name="name_value"), + instance_id="instance_id_value", ) -def test_get_instance_rest_error(): +def test_create_instance_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -12401,11 +13289,11 @@ def test_get_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateInstanceRequest, + service.CreateSecondaryInstanceRequest, dict, ], ) -def test_create_instance_rest(request_type): +def test_create_secondary_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12450,14 +13338,44 @@ def test_create_instance_rest(request_type): "require_connectors": True, "ssl_config": {"ssl_mode": 1, "ca_source": 1}, }, + "satisfies_pzi": True, "satisfies_pzs": True, + "psc_instance_config": { + "service_attachment_link": "service_attachment_link_value", + "allowed_consumer_projects": [ + "allowed_consumer_projects_value1", + "allowed_consumer_projects_value2", + ], + "allowed_consumer_networks": [ + "allowed_consumer_networks_value1", + "allowed_consumer_networks_value2", + ], + "psc_interface_configs": [ + { + "consumer_endpoint_ips": [ + "consumer_endpoint_ips_value1", + "consumer_endpoint_ips_value2", + ], + "network_attachment": "network_attachment_value", + } + ], + "outgoing_service_attachment_links": [ + "outgoing_service_attachment_links_value1", + "outgoing_service_attachment_links_value2", + ], + "psc_enabled": True, + }, + "network_config": { + "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], + "enable_public_ip": True, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateInstanceRequest.meta.fields["instance"] + test_field = service.CreateSecondaryInstanceRequest.meta.fields["instance"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12533,14 +13451,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_instance(request) + response = client.create_secondary_instance(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_instance_rest_required_fields( - request_type=service.CreateInstanceRequest, +def test_create_secondary_instance_rest_required_fields( + request_type=service.CreateSecondaryInstanceRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -12562,7 +13480,7 @@ def test_create_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_instance._get_unset_required_fields(jsonified_request) + ).create_secondary_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12574,7 +13492,7 @@ def test_create_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_instance._get_unset_required_fields(jsonified_request) + ).create_secondary_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( @@ -12623,7 +13541,7 @@ def test_create_instance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_instance(request) + response = client.create_secondary_instance(request) expected_params = [ ( @@ -12636,12 +13554,12 @@ def test_create_instance_rest_required_fields( assert expected_params == actual_params -def test_create_instance_rest_unset_required_fields(): +def test_create_secondary_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_instance._get_unset_required_fields({}) + unset_fields = transport.create_secondary_instance._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -12661,7 +13579,7 @@ def test_create_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): +def test_create_secondary_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12676,13 +13594,15 @@ def test_create_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_create_instance" + transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_create_instance" + transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) + pb_message = service.CreateSecondaryInstanceRequest.pb( + service.CreateSecondaryInstanceRequest() + ) 
transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12697,7 +13617,7 @@ def test_create_instance_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateInstanceRequest() + request = service.CreateSecondaryInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -12705,7 +13625,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_instance( + client.create_secondary_instance( request, metadata=[ ("key", "val"), @@ -12717,8 +13637,8 @@ def test_create_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_instance_rest_bad_request( - transport: str = "rest", request_type=service.CreateInstanceRequest +def test_create_secondary_instance_rest_bad_request( + transport: str = "rest", request_type=service.CreateSecondaryInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12738,10 +13658,10 @@ def test_create_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_instance(request) + client.create_secondary_instance(request) -def test_create_instance_rest_flattened(): +def test_create_secondary_instance_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12772,20 +13692,20 @@ def test_create_instance_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_instance(**mock_args) + client.create_secondary_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances:createsecondary" % client.transport._host, args[1], ) -def test_create_instance_rest_flattened_error(transport: str = "rest"): +def test_create_secondary_instance_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12794,15 +13714,15 @@ def test_create_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_instance( - service.CreateInstanceRequest(), + client.create_secondary_instance( + service.CreateSecondaryInstanceRequest(), parent="parent_value", instance=resources.Instance(name="name_value"), instance_id="instance_id_value", ) -def test_create_instance_rest_error(): +def test_create_secondary_instance_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -12811,11 +13731,11 @@ def test_create_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateSecondaryInstanceRequest, + service.BatchCreateInstancesRequest, dict, ], ) -def test_create_secondary_instance_rest(request_type): +def test_batch_create_instances_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12823,51 +13743,93 @@ def test_create_secondary_instance_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} - request_init["instance"] = { - "name": "name_value", - "display_name": "display_name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": 
{}, - "delete_time": {}, - "labels": {}, - "state": 1, - "instance_type": 1, - "machine_config": {"cpu_count": 976}, - "availability_type": 1, - "gce_zone": "gce_zone_value", - "database_flags": {}, - "writable_node": { - "zone_id": "zone_id_value", - "id": "id_value", - "ip": "ip_value", - "state": "state_value", - }, - "nodes": {}, - "query_insights_config": { - "record_application_tags": True, - "record_client_address": True, - "query_string_length": 2061, - "query_plans_per_minute": 2378, - }, - "read_pool_config": {"node_count": 1070}, - "ip_address": "ip_address_value", - "reconciling": True, - "etag": "etag_value", - "annotations": {}, - "update_policy": {"mode": 1}, - "client_connection_config": { - "require_connectors": True, - "ssl_config": {"ssl_mode": 1, "ca_source": 1}, - }, - "satisfies_pzs": True, + request_init["requests"] = { + "create_instance_requests": [ + { + "parent": "parent_value", + "instance_id": "instance_id_value", + "instance": { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": "etag_value", + "annotations": {}, + "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, + "satisfies_pzi": True, + "satisfies_pzs": True, + "psc_instance_config": 
{ + "service_attachment_link": "service_attachment_link_value", + "allowed_consumer_projects": [ + "allowed_consumer_projects_value1", + "allowed_consumer_projects_value2", + ], + "allowed_consumer_networks": [ + "allowed_consumer_networks_value1", + "allowed_consumer_networks_value2", + ], + "psc_interface_configs": [ + { + "consumer_endpoint_ips": [ + "consumer_endpoint_ips_value1", + "consumer_endpoint_ips_value2", + ], + "network_attachment": "network_attachment_value", + } + ], + "outgoing_service_attachment_links": [ + "outgoing_service_attachment_links_value1", + "outgoing_service_attachment_links_value2", + ], + "psc_enabled": True, + }, + "network_config": { + "authorized_external_networks": [ + {"cidr_range": "cidr_range_value"} + ], + "enable_public_ip": True, + }, + }, + "request_id": "request_id_value", + "validate_only": True, + } + ] } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateSecondaryInstanceRequest.meta.fields["instance"] + test_field = service.BatchCreateInstancesRequest.meta.fields["requests"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12895,7 +13857,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["requests"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12925,10 +13887,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") 
if subfield: if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] + for i in range(0, len(request_init["requests"][field])): + del request_init["requests"][field][i][subfield] else: - del request_init["instance"][field][subfield] + del request_init["requests"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12943,20 +13905,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_secondary_instance(request) + response = client.batch_create_instances(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_secondary_instance_rest_required_fields( - request_type=service.CreateSecondaryInstanceRequest, +def test_batch_create_instances_rest_required_fields( + request_type=service.BatchCreateInstancesRequest, ): transport_class = transports.AlloyDBAdminRestTransport request_init = {} request_init["parent"] = "" - request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12968,38 +13929,26 @@ def test_create_secondary_instance_rest_required_fields( ) # verify fields with default values are dropped - assert "instanceId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_secondary_instance._get_unset_required_fields(jsonified_request) + ).batch_create_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == request_init["instance_id"] jsonified_request["parent"] = "parent_value" - 
jsonified_request["instanceId"] = "instance_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_secondary_instance._get_unset_required_fields(jsonified_request) + ).batch_create_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "instance_id", - "request_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == "instance_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13033,45 +13982,32 @@ def test_create_secondary_instance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_secondary_instance(request) + response = client.batch_create_instances(request) - expected_params = [ - ( - "instanceId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_secondary_instance_rest_unset_required_fields(): +def test_batch_create_instances_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_secondary_instance._get_unset_required_fields({}) + unset_fields = transport.batch_create_instances._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "instanceId", - "requestId", - "validateOnly", - ) - ) + set(("requestId",)) & set( ( "parent", - "instanceId", - "instance", + "requests", ) ) ) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_secondary_instance_rest_interceptors(null_interceptor): +def test_batch_create_instances_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13086,14 +14022,14 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" + transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" + transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateSecondaryInstanceRequest.pb( - service.CreateSecondaryInstanceRequest() + pb_message = service.BatchCreateInstancesRequest.pb( + service.BatchCreateInstancesRequest() ) transcode.return_value = { "method": "post", @@ -13109,7 +14045,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateSecondaryInstanceRequest() + request = service.BatchCreateInstancesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13117,7 +14053,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_secondary_instance( + client.batch_create_instances( request, metadata=[ ("key", "val"), @@ -13129,8 +14065,8 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_secondary_instance_rest_bad_request( - transport: str = "rest", request_type=service.CreateSecondaryInstanceRequest +def 
test_batch_create_instances_rest_bad_request( + transport: str = "rest", request_type=service.BatchCreateInstancesRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13142,79 +14078,18 @@ def test_create_secondary_instance_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_secondary_instance(request) - - -def test_create_secondary_instance_rest_flattened(): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_secondary_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/instances:createsecondary" - % client.transport._host, - args[1], - ) - - -def test_create_secondary_instance_rest_flattened_error(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_secondary_instance( - service.CreateSecondaryInstanceRequest(), - parent="parent_value", - instance=resources.Instance(name="name_value"), - instance_id="instance_id_value", - ) + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_instances(request) -def test_create_secondary_instance_rest_error(): +def test_batch_create_instances_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13223,73 +14098,97 @@ def test_create_secondary_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.BatchCreateInstancesRequest, + service.UpdateInstanceRequest, dict, ], ) -def test_batch_create_instances_rest(request_type): +def test_update_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} - request_init["requests"] = { - "create_instance_requests": [ - { - "parent": "parent_value", - "instance_id": "instance_id_value", - "instance": { - "name": "name_value", - "display_name": 
"display_name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "labels": {}, - "state": 1, - "instance_type": 1, - "machine_config": {"cpu_count": 976}, - "availability_type": 1, - "gce_zone": "gce_zone_value", - "database_flags": {}, - "writable_node": { - "zone_id": "zone_id_value", - "id": "id_value", - "ip": "ip_value", - "state": "state_value", - }, - "nodes": {}, - "query_insights_config": { - "record_application_tags": True, - "record_client_address": True, - "query_string_length": 2061, - "query_plans_per_minute": 2378, - }, - "read_pool_config": {"node_count": 1070}, - "ip_address": "ip_address_value", - "reconciling": True, - "etag": "etag_value", - "annotations": {}, - "update_policy": {"mode": 1}, - "client_connection_config": { - "require_connectors": True, - "ssl_config": {"ssl_mode": 1, "ca_source": 1}, - }, - "satisfies_pzs": True, - }, - "request_id": "request_id_value", - "validate_only": True, - } - ] + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "instance_type": 1, + "machine_config": {"cpu_count": 976}, + "availability_type": 1, + "gce_zone": "gce_zone_value", + "database_flags": {}, + "writable_node": { + "zone_id": "zone_id_value", + "id": "id_value", + "ip": "ip_value", + "state": "state_value", + }, + "nodes": {}, + "query_insights_config": { + "record_application_tags": True, + "record_client_address": True, + "query_string_length": 2061, + "query_plans_per_minute": 2378, + }, + "read_pool_config": {"node_count": 1070}, + "ip_address": "ip_address_value", + "reconciling": True, + "etag": 
"etag_value", + "annotations": {}, + "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, + "satisfies_pzi": True, + "satisfies_pzs": True, + "psc_instance_config": { + "service_attachment_link": "service_attachment_link_value", + "allowed_consumer_projects": [ + "allowed_consumer_projects_value1", + "allowed_consumer_projects_value2", + ], + "allowed_consumer_networks": [ + "allowed_consumer_networks_value1", + "allowed_consumer_networks_value2", + ], + "psc_interface_configs": [ + { + "consumer_endpoint_ips": [ + "consumer_endpoint_ips_value1", + "consumer_endpoint_ips_value2", + ], + "network_attachment": "network_attachment_value", + } + ], + "outgoing_service_attachment_links": [ + "outgoing_service_attachment_links_value1", + "outgoing_service_attachment_links_value2", + ], + "psc_enabled": True, + }, + "network_config": { + "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], + "enable_public_ip": True, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.BatchCreateInstancesRequest.meta.fields["requests"] + test_field = service.UpdateInstanceRequest.meta.fields["instance"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -13317,7 +14216,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["requests"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -13347,10 +14246,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["requests"][field])): - del request_init["requests"][field][i][subfield] + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] else: - del request_init["requests"][field][subfield] + del request_init["instance"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13365,19 +14264,18 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_create_instances(request) + response = client.update_instance(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_batch_create_instances_rest_required_fields( - request_type=service.BatchCreateInstancesRequest, +def test_update_instance_rest_required_fields( + request_type=service.UpdateInstanceRequest, ): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13392,23 +14290,26 @@ def test_batch_create_instances_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_create_instances._get_unset_required_fields(jsonified_request) + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_create_instances._get_unset_required_fields(jsonified_request) + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13429,7 +14330,7 @@ def test_batch_create_instances_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -13442,32 +14343,34 @@ def test_batch_create_instances_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_create_instances(request) + response = client.update_instance(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_create_instances_rest_unset_required_fields(): +def test_update_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.batch_create_instances._get_unset_required_fields({}) + unset_fields = transport.update_instance._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) - & set( + set( ( - "parent", - "requests", + "allowMissing", + "requestId", + "updateMask", + "validateOnly", ) ) + & set(("instance",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_create_instances_rest_interceptors(null_interceptor): +def test_update_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -13482,15 +14385,13 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" + transports.AlloyDBAdminRestInterceptor, "post_update_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" + transports.AlloyDBAdminRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.BatchCreateInstancesRequest.pb( - service.BatchCreateInstancesRequest() - ) + pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13505,7 +14406,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.BatchCreateInstancesRequest() + request = service.UpdateInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13513,7 +14414,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.batch_create_instances( + client.update_instance( request, metadata=[ ("key", "val"), @@ -13525,8 +14426,8 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): post.assert_called_once() -def test_batch_create_instances_rest_bad_request( - transport: str = "rest", request_type=service.BatchCreateInstancesRequest +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=service.UpdateInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13534,7 +14435,11 @@ def test_batch_create_instances_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13546,10 +14451,71 @@ def test_batch_create_instances_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_create_instances(request) + client.update_instance(request) -def test_batch_create_instances_rest_error(): +def test_update_instance_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": { + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{instance.name=projects/*/locations/*/clusters/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + service.UpdateInstanceRequest(), + instance=resources.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13558,11 +14524,11 @@ def test_batch_create_instances_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateInstanceRequest, + service.DeleteInstanceRequest, dict, ], ) -def test_update_instance_rest(request_type): +def test_delete_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13570,116 +14536,8 @@ def test_update_instance_rest(request_type): # send a request that will satisfy transcoding request_init = { - "instance": { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } - } - request_init["instance"] = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4", - "display_name": "display_name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "labels": {}, - "state": 1, - "instance_type": 1, - "machine_config": {"cpu_count": 976}, - "availability_type": 1, - "gce_zone": "gce_zone_value", - "database_flags": {}, - "writable_node": { - "zone_id": "zone_id_value", - "id": "id_value", - 
"ip": "ip_value", - "state": "state_value", - }, - "nodes": {}, - "query_insights_config": { - "record_application_tags": True, - "record_client_address": True, - "query_string_length": 2061, - "query_plans_per_minute": 2378, - }, - "read_pool_config": {"node_count": 1070}, - "ip_address": "ip_address_value", - "reconciling": True, - "etag": "etag_value", - "annotations": {}, - "update_policy": {"mode": 1}, - "client_connection_config": { - "require_connectors": True, - "ssl_config": {"ssl_mode": 1, "ca_source": 1}, - }, - "satisfies_pzs": True, + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del 
request_init["instance"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13694,18 +14552,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_instance(request) + response = client.delete_instance(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_instance_rest_required_fields( - request_type=service.UpdateInstanceRequest, +def test_delete_instance_rest_required_fields( + request_type=service.DeleteInstanceRequest, ): transport_class = transports.AlloyDBAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13720,26 +14579,29 @@ def test_update_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) + ).delete_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "etag", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13760,10 +14622,9 @@ def test_update_instance_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13773,34 +14634,33 @@ def test_update_instance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_instance(request) + response = client.delete_instance(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_instance_rest_unset_required_fields(): +def test_delete_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_instance._get_unset_required_fields({}) + unset_fields = transport.delete_instance._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "etag", "requestId", - "updateMask", "validateOnly", ) ) - & set(("instance",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): +def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13815,13 +14675,13 @@ def 
test_update_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_update_instance" + transports.AlloyDBAdminRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_update_instance" + transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) + pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13836,7 +14696,7 @@ def test_update_instance_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateInstanceRequest() + request = service.DeleteInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13844,7 +14704,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_instance( + client.delete_instance( request, metadata=[ ("key", "val"), @@ -13856,8 +14716,8 @@ def test_update_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_instance_rest_bad_request( - transport: str = "rest", request_type=service.UpdateInstanceRequest +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=service.DeleteInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13866,9 +14726,7 @@ def test_update_instance_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "instance": { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" } request 
= request_type(**request_init) @@ -13881,10 +14739,10 @@ def test_update_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_instance(request) + client.delete_instance(request) -def test_update_instance_rest_flattened(): +def test_delete_instance_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13897,15 +14755,12 @@ def test_update_instance_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "instance": { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" } # get truthy value for each flattened field mock_args = dict( - instance=resources.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -13916,20 +14771,20 @@ def test_update_instance_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_instance(**mock_args) + client.delete_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{instance.name=projects/*/locations/*/clusters/*/instances/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}" % client.transport._host, args[1], ) -def test_update_instance_rest_flattened_error(transport: str = "rest"): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13938,14 +14793,13 @@ def test_update_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_instance( - service.UpdateInstanceRequest(), - instance=resources.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_instance( + service.DeleteInstanceRequest(), + name="name_value", ) -def test_update_instance_rest_error(): +def test_delete_instance_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13954,11 +14808,11 @@ def test_update_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteInstanceRequest, + service.FailoverInstanceRequest, dict, ], ) -def test_delete_instance_rest(request_type): +def test_failover_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13982,14 +14836,14 @@ def test_delete_instance_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_instance(request) + response = client.failover_instance(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_instance_rest_required_fields( - request_type=service.DeleteInstanceRequest, +def test_failover_instance_rest_required_fields( + request_type=service.FailoverInstanceRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -14009,7 +14863,7 @@ def test_delete_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_instance._get_unset_required_fields(jsonified_request) + ).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14018,15 +14872,7 @@ def test_delete_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "etag", - "request_id", - "validate_only", - ) - ) + ).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14052,9 +14898,10 @@ def test_delete_instance_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -14064,33 +14911,24 @@ def test_delete_instance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_instance(request) + response = client.failover_instance(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_delete_instance_rest_unset_required_fields(): +def test_failover_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "etag", - "requestId", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.failover_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): +def test_failover_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14105,13 +14943,15 @@ def test_delete_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_delete_instance" + transports.AlloyDBAdminRestInterceptor, "post_failover_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" + transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) + pb_message = service.FailoverInstanceRequest.pb( + service.FailoverInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14126,7 +14966,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.DeleteInstanceRequest() + request = service.FailoverInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -14134,7 +14974,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() - client.delete_instance( + client.failover_instance( request, metadata=[ ("key", "val"), @@ -14146,8 +14986,8 @@ def test_delete_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_instance_rest_bad_request( - transport: str = "rest", request_type=service.DeleteInstanceRequest +def test_failover_instance_rest_bad_request( + transport: str = "rest", request_type=service.FailoverInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14169,10 +15009,10 @@ def test_delete_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_instance(request) + client.failover_instance(request) -def test_delete_instance_rest_flattened(): +def test_failover_instance_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14201,20 +15041,20 @@ def test_delete_instance_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_instance(**mock_args) + client.failover_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:failover" % client.transport._host, args[1], ) -def test_delete_instance_rest_flattened_error(transport: str = "rest"): +def test_failover_instance_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14223,13 +15063,13 @@ def test_delete_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_instance( - service.DeleteInstanceRequest(), + client.failover_instance( + service.FailoverInstanceRequest(), name="name_value", ) -def test_delete_instance_rest_error(): +def test_failover_instance_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14238,11 +15078,11 @@ def test_delete_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.FailoverInstanceRequest, + service.InjectFaultRequest, dict, ], ) -def test_failover_instance_rest(request_type): +def test_inject_fault_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14266,15 +15106,13 @@ def test_failover_instance_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.failover_instance(request) + response = client.inject_fault(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_failover_instance_rest_required_fields( - request_type=service.FailoverInstanceRequest, -): +def test_inject_fault_rest_required_fields(request_type=service.InjectFaultRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} @@ -14293,7 +15131,7 @@ def test_failover_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).failover_instance._get_unset_required_fields(jsonified_request) + ).inject_fault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14302,7 +15140,7 @@ def test_failover_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).failover_instance._get_unset_required_fields(jsonified_request) + ).inject_fault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14341,24 +15179,32 @@ def test_failover_instance_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.failover_instance(request) + response = client.inject_fault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_failover_instance_rest_unset_required_fields(): +def test_inject_fault_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.failover_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.inject_fault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "faultType", + "name", + ) + ) + ) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_failover_instance_rest_interceptors(null_interceptor): +def test_inject_fault_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14373,15 +15219,13 @@ def test_failover_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_failover_instance" + transports.AlloyDBAdminRestInterceptor, "post_inject_fault" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" + transports.AlloyDBAdminRestInterceptor, "pre_inject_fault" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.FailoverInstanceRequest.pb( - service.FailoverInstanceRequest() - ) + pb_message = service.InjectFaultRequest.pb(service.InjectFaultRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14396,7 +15240,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.FailoverInstanceRequest() + request = service.InjectFaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -14404,7 +15248,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.failover_instance( + client.inject_fault( request, metadata=[ ("key", "val"), @@ -14416,8 +15260,8 @@ def test_failover_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_failover_instance_rest_bad_request( - transport: str = "rest", request_type=service.FailoverInstanceRequest +def test_inject_fault_rest_bad_request( + transport: str = "rest", request_type=service.InjectFaultRequest ): client = AlloyDBAdminClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -14439,10 +15283,10 @@ def test_failover_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.failover_instance(request) + client.inject_fault(request) -def test_failover_instance_rest_flattened(): +def test_inject_fault_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14460,6 +15304,7 @@ def test_failover_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( + fault_type=service.InjectFaultRequest.FaultType.STOP_VM, name="name_value", ) mock_args.update(sample_request) @@ -14471,20 +15316,20 @@ def test_failover_instance_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.failover_instance(**mock_args) + client.inject_fault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:failover" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:injectFault" % client.transport._host, args[1], ) -def test_failover_instance_rest_flattened_error(transport: str = "rest"): +def test_inject_fault_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14493,13 +15338,14 @@ def test_failover_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.failover_instance( - service.FailoverInstanceRequest(), + client.inject_fault( + service.InjectFaultRequest(), + fault_type=service.InjectFaultRequest.FaultType.STOP_VM, name="name_value", ) -def test_failover_instance_rest_error(): +def test_inject_fault_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14508,11 +15354,11 @@ def test_failover_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.InjectFaultRequest, + service.RestartInstanceRequest, dict, ], ) -def test_inject_fault_rest(request_type): +def test_restart_instance_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14536,13 +15382,15 @@ def test_inject_fault_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.inject_fault(request) + response = client.restart_instance(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_inject_fault_rest_required_fields(request_type=service.InjectFaultRequest): +def test_restart_instance_rest_required_fields( + request_type=service.RestartInstanceRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} @@ -14561,7 +15409,7 @@ def test_inject_fault_rest_required_fields(request_type=service.InjectFaultReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).inject_fault._get_unset_required_fields(jsonified_request) + ).restart_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14570,7 +15418,7 @@ def test_inject_fault_rest_required_fields(request_type=service.InjectFaultReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).inject_fault._get_unset_required_fields(jsonified_request) + ).restart_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14609,32 +15457,24 @@ def test_inject_fault_rest_required_fields(request_type=service.InjectFaultReque response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.inject_fault(request) + response = client.restart_instance(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_inject_fault_rest_unset_required_fields(): +def test_restart_instance_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.inject_fault._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "faultType", - "name", - ) - ) - ) + unset_fields = 
transport.restart_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_inject_fault_rest_interceptors(null_interceptor): +def test_restart_instance_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14649,13 +15489,13 @@ def test_inject_fault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_inject_fault" + transports.AlloyDBAdminRestInterceptor, "post_restart_instance" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_inject_fault" + transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.InjectFaultRequest.pb(service.InjectFaultRequest()) + pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14670,7 +15510,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.InjectFaultRequest() + request = service.RestartInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -14678,7 +15518,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.inject_fault( + client.restart_instance( request, metadata=[ ("key", "val"), @@ -14690,8 +15530,8 @@ def test_inject_fault_rest_interceptors(null_interceptor): post.assert_called_once() -def test_inject_fault_rest_bad_request( - transport: str = "rest", request_type=service.InjectFaultRequest +def test_restart_instance_rest_bad_request( + transport: str = "rest", 
request_type=service.RestartInstanceRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14713,10 +15553,10 @@ def test_inject_fault_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.inject_fault(request) + client.restart_instance(request) -def test_inject_fault_rest_flattened(): +def test_restart_instance_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14734,7 +15574,6 @@ def test_inject_fault_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - fault_type=service.InjectFaultRequest.FaultType.STOP_VM, name="name_value", ) mock_args.update(sample_request) @@ -14746,20 +15585,20 @@ def test_inject_fault_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.inject_fault(**mock_args) + client.restart_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:injectFault" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:restart" % client.transport._host, args[1], ) -def test_inject_fault_rest_flattened_error(transport: str = "rest"): +def test_restart_instance_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14768,14 +15607,13 @@ def test_inject_fault_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.inject_fault( - service.InjectFaultRequest(), - fault_type=service.InjectFaultRequest.FaultType.STOP_VM, + client.restart_instance( + service.RestartInstanceRequest(), name="name_value", ) -def test_inject_fault_rest_error(): +def test_restart_instance_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14784,47 +15622,50 @@ def test_inject_fault_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.RestartInstanceRequest, + service.ListBackupsRequest, dict, ], ) -def test_restart_instance_rest(request_type): +def test_list_backups_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restart_instance(request) + response = client.list_backups(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_restart_instance_rest_required_fields( - request_type=service.RestartInstanceRequest, -): +def test_list_backups_rest_required_fields(request_type=service.ListBackupsRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14839,21 +15680,30 @@ def test_restart_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restart_instance._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restart_instance._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14862,7 +15712,7 @@ def test_restart_instance_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListBackupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14874,37 +15724,49 @@ def test_restart_instance_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restart_instance(request) + response = client.list_backups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_restart_instance_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.restart_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restart_instance_rest_interceptors(null_interceptor): +def test_list_backups_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14917,15 +15779,13 @@ def test_restart_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_restart_instance" + transports.AlloyDBAdminRestInterceptor, "post_list_backups" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" + transports.AlloyDBAdminRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) + pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14936,19 +15796,19 @@ def test_restart_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListBackupsResponse.to_json( + service.ListBackupsResponse() ) - request = service.RestartInstanceRequest() + request = service.ListBackupsRequest() metadata = [ ("key", 
"val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListBackupsResponse() - client.restart_instance( + client.list_backups( request, metadata=[ ("key", "val"), @@ -14960,8 +15820,8 @@ def test_restart_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_restart_instance_rest_bad_request( - transport: str = "rest", request_type=service.RestartInstanceRequest +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=service.ListBackupsRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14969,9 +15829,7 @@ def test_restart_instance_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14983,10 +15841,10 @@ def test_restart_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.restart_instance(request) + client.list_backups(request) -def test_restart_instance_rest_flattened(): +def test_list_backups_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14995,40 +15853,40 @@ def test_restart_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListBackupsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.restart_instance(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/instances/*}:restart" + "%s/v1alpha/{parent=projects/*/locations/*}/backups" % client.transport._host, args[1], ) -def test_restart_instance_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15037,65 +15895,144 @@ def test_restart_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.restart_instance( - service.RestartInstanceRequest(), - name="name_value", + client.list_backups( + service.ListBackupsRequest(), + parent="parent_value", ) -def test_restart_instance_rest_error(): +def test_list_backups_rest_pager(transport: str = "rest"): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + resources.Backup(), + ], + next_page_token="abc", + ), + service.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + ], + next_page_token="ghi", + ), + service.ListBackupsResponse( + backups=[ + resources.Backup(), + resources.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListBackupsRequest, + service.GetBackupRequest, dict, ], ) -def test_list_backups_rest(request_type): +def test_get_backup_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.Backup( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=resources.Backup.State.READY, + type_=resources.Backup.Type.ON_DEMAND, + description="description_value", + cluster_uid="cluster_uid_value", + cluster_name="cluster_name_value", + reconciling=True, + etag="etag_value", + size_bytes=1089, + satisfies_pzi=True, + satisfies_pzs=True, + database_version=resources.DatabaseVersion.POSTGRES_13, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListBackupsResponse.pb(return_value) + return_value = resources.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_backups(request) + response = client.get_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.Backup) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == resources.Backup.State.READY + assert response.type_ == resources.Backup.Type.ON_DEMAND + assert response.description == "description_value" + assert response.cluster_uid == "cluster_uid_value" + assert response.cluster_name == "cluster_name_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.size_bytes == 1089 + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 -def test_list_backups_rest_required_fields(request_type=service.ListBackupsRequest): +def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15110,30 +16047,21 @@ def test_list_backups_rest_required_fields(request_type=service.ListBackupsReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15142,7 +16070,7 @@ def test_list_backups_rest_required_fields(request_type=service.ListBackupsReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListBackupsResponse() + return_value = resources.Backup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15163,40 +16091,30 @@ def test_list_backups_rest_required_fields(request_type=service.ListBackupsReque response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListBackupsResponse.pb(return_value) + return_value = resources.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_backups(request) + response = client.get_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backups_rest_unset_required_fields(): +def test_get_backup_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): +def test_get_backup_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15209,13 +16127,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_backups" + transports.AlloyDBAdminRestInterceptor, "post_get_backup" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_backups" + transports.AlloyDBAdminRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) + pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15226,19 +16144,17 @@ def test_list_backups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListBackupsResponse.to_json( - service.ListBackupsResponse() - ) + req.return_value._content = resources.Backup.to_json(resources.Backup()) - request = service.ListBackupsRequest() + request = service.GetBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListBackupsResponse() + post.return_value = resources.Backup() - client.list_backups( + client.get_backup( request, metadata=[ ("key", "val"), @@ -15250,8 +16166,8 
@@ def test_list_backups_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_backups_rest_bad_request( - transport: str = "rest", request_type=service.ListBackupsRequest +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=service.GetBackupRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15259,7 +16175,7 @@ def test_list_backups_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15271,10 +16187,10 @@ def test_list_backups_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_backups(request) + client.get_backup(request) -def test_list_backups_rest_flattened(): +def test_get_backup_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15283,14 +16199,14 @@ def test_list_backups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListBackupsResponse() + return_value = resources.Backup() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -15298,169 +16214,181 @@ def test_list_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListBackupsResponse.pb(return_value) + return_value = resources.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_backups(**mock_args) + client.get_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/backups" - % client.transport._host, - args[1], - ) - - -def test_list_backups_rest_flattened_error(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - service.ListBackupsRequest(), - parent="parent_value", - ) - - -def test_list_backups_rest_pager(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - resources.Backup(), - ], - next_page_token="abc", - ), - service.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - ], - next_page_token="ghi", - ), - service.ListBackupsResponse( - backups=[ - resources.Backup(), - resources.Backup(), - ], - ), + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/backups/*}" + % client.transport._host, + args[1], ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(service.ListBackupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_backups(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + service.GetBackupRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Backup) for i in results) - pages = list(client.list_backups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_backup_rest_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - service.GetBackupRequest, + service.CreateBackupRequest, dict, ], ) -def test_get_backup_rest(request_type): +def test_create_backup_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "labels": {}, + "state": 1, + "type_": 1, + "description": "description_value", + "cluster_uid": "cluster_uid_value", + "cluster_name": "cluster_name_value", + "reconciling": True, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "encryption_info": { + "encryption_type": 1, + "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], + }, + "etag": "etag_value", + "annotations": {}, + "size_bytes": 1089, + "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "satisfies_pzi": True, + "satisfies_pzs": True, + "database_version": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# 
pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Backup( - name="name_value", - display_name="display_name_value", - uid="uid_value", - state=resources.Backup.State.READY, - type_=resources.Backup.Type.ON_DEMAND, - description="description_value", - cluster_uid="cluster_uid_value", - cluster_name="cluster_name_value", - reconciling=True, - etag="etag_value", - size_bytes=1089, - satisfies_pzs=True, - database_version=resources.DatabaseVersion.POSTGRES_13, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_backup(request) + response = client.create_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Backup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.uid == "uid_value" - assert response.state == resources.Backup.State.READY - assert response.type_ == resources.Backup.Type.ON_DEMAND - assert response.description == "description_value" - assert response.cluster_uid == "cluster_uid_value" - assert response.cluster_name == "cluster_name_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.size_bytes == 1089 - assert response.satisfies_pzs is True - assert response.database_version == resources.DatabaseVersion.POSTGRES_13 + assert response.operation.name == "operations/spam" -def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): +def test_create_backup_rest_required_fields(request_type=service.CreateBackupRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["backup_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15472,24 +16400,38 @@ def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): ) # verify fields with default values are dropped + assert "backupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_id", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15498,7 +16440,7 @@ def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Backup() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15510,39 +16452,58 @@ def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_backup(request) + response = client.create_backup(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_rest_unset_required_fields(): +def test_create_backup_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupId", + "backup", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): +def test_create_backup_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15555,13 +16516,15 @@ def 
test_get_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_get_backup" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_backup" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_get_backup" + transports.AlloyDBAdminRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) + pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15572,17 +16535,19 @@ def test_get_backup_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.Backup.to_json(resources.Backup()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = service.GetBackupRequest() + request = service.CreateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Backup() + post.return_value = operations_pb2.Operation() - client.get_backup( + client.create_backup( request, metadata=[ ("key", "val"), @@ -15594,8 +16559,8 @@ def test_get_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_backup_rest_bad_request( - transport: str = "rest", request_type=service.GetBackupRequest +def test_create_backup_rest_bad_request( + transport: str = "rest", request_type=service.CreateBackupRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15603,7 +16568,7 @@ def test_get_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = 
{"name": "projects/sample1/locations/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15615,10 +16580,10 @@ def test_get_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_backup(request) + client.create_backup(request) -def test_get_backup_rest_flattened(): +def test_create_backup_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15627,40 +16592,40 @@ def test_get_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.Backup() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_backup(**mock_args) + client.create_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/backups/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/backups" % client.transport._host, args[1], ) -def test_get_backup_rest_flattened_error(transport: str = "rest"): +def test_create_backup_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15669,13 +16634,15 @@ def test_get_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - service.GetBackupRequest(), - name="name_value", + client.create_backup( + service.CreateBackupRequest(), + parent="parent_value", + backup=resources.Backup(name="name_value"), + backup_id="backup_id_value", ) -def test_get_backup_rest_error(): +def test_create_backup_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15684,20 +16651,22 @@ def test_get_backup_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateBackupRequest, + service.UpdateBackupRequest, dict, ], ) -def test_create_backup_rest(request_type): +def test_update_backup_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } request_init["backup"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/backups/sample3", "display_name": "display_name_value", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, @@ -15720,6 +16689,7 @@ def test_create_backup_rest(request_type): "size_bytes": 1089, 
"expiry_time": {}, "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "satisfies_pzi": True, "satisfies_pzs": True, "database_version": 1, } @@ -15728,7 +16698,7 @@ def test_create_backup_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateBackupRequest.meta.fields["backup"] + test_field = service.UpdateBackupRequest.meta.fields["backup"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -15804,18 +16774,16 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_backup(request) + response = client.update_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_backup_rest_required_fields(request_type=service.CreateBackupRequest): +def test_update_backup_rest_required_fields(request_type=service.UpdateBackupRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["backup_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15827,38 +16795,29 @@ def test_create_backup_rest_required_fields(request_type=service.CreateBackupReq ) # verify fields with default values are dropped - assert "backupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == request_init["backup_id"] - - 
jsonified_request["parent"] = "parent_value" - jsonified_request["backupId"] = "backup_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "backup_id", + "allow_missing", "request_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == "backup_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15879,7 +16838,7 @@ def test_create_backup_rest_required_fields(request_type=service.CreateBackupReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -15892,45 +16851,34 @@ def test_create_backup_rest_required_fields(request_type=service.CreateBackupReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_backup(request) + response = client.update_backup(request) - expected_params = [ - ( - "backupId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_backup_rest_unset_required_fields(): +def test_update_backup_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup._get_unset_required_fields({}) + unset_fields = 
transport.update_backup._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "backupId", + "allowMissing", "requestId", + "updateMask", "validateOnly", ) ) - & set( - ( - "parent", - "backupId", - "backup", - ) - ) + & set(("backup",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_rest_interceptors(null_interceptor): +def test_update_backup_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15945,13 +16893,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_create_backup" + transports.AlloyDBAdminRestInterceptor, "post_update_backup" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_create_backup" + transports.AlloyDBAdminRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) + pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15966,7 +16914,7 @@ def test_create_backup_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.CreateBackupRequest() + request = service.UpdateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -15974,7 +16922,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_backup( + client.update_backup( request, metadata=[ ("key", "val"), @@ -15986,8 +16934,8 @@ def test_create_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_backup_rest_bad_request( - transport: str = "rest", 
request_type=service.CreateBackupRequest +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=service.UpdateBackupRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15995,7 +16943,9 @@ def test_create_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16007,10 +16957,10 @@ def test_create_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_backup(request) + client.update_backup(request) -def test_create_backup_rest_flattened(): +def test_update_backup_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16022,13 +16972,14 @@ def test_create_backup_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -16039,20 +16990,20 @@ def test_create_backup_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_backup(**mock_args) + client.update_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/backups" + "%s/v1alpha/{backup.name=projects/*/locations/*/backups/*}" % client.transport._host, args[1], ) -def test_create_backup_rest_flattened_error(transport: str = "rest"): +def test_update_backup_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16061,15 +17012,14 @@ def test_create_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup( - service.CreateBackupRequest(), - parent="parent_value", + client.update_backup( + service.UpdateBackupRequest(), backup=resources.Backup(name="name_value"), - backup_id="backup_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_backup_rest_error(): +def test_update_backup_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16078,114 +17028,18 @@ def test_create_backup_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateBackupRequest, + service.DeleteBackupRequest, dict, ], ) -def test_update_backup_rest(request_type): +def test_delete_backup_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} - } - request_init["backup"] = { - "name": "projects/sample1/locations/sample2/backups/sample3", - "display_name": "display_name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "labels": {}, - "state": 1, - "type_": 1, - "description": 
"description_value", - "cluster_uid": "cluster_uid_value", - "cluster_name": "cluster_name_value", - "reconciling": True, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "encryption_info": { - "encryption_type": 1, - "kms_key_versions": ["kms_key_versions_value1", "kms_key_versions_value2"], - }, - "etag": "etag_value", - "annotations": {}, - "size_bytes": 1089, - "expiry_time": {}, - "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, - "satisfies_pzs": True, - "database_version": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del 
request_init["backup"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -16200,16 +17054,17 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_backup(request) + response = client.delete_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_backup_rest_required_fields(request_type=service.UpdateBackupRequest): +def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16224,26 +17079,29 @@ def test_update_backup_rest_required_fields(request_type=service.UpdateBackupReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", + "etag", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16264,10 +17122,9 @@ def test_update_backup_rest_required_fields(request_type=service.UpdateBackupReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -16277,34 +17134,33 @@ def test_update_backup_rest_required_fields(request_type=service.UpdateBackupReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_backup(request) + response = client.delete_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_backup_rest_unset_required_fields(): +def test_delete_backup_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_backup._get_unset_required_fields({}) + unset_fields = transport.delete_backup._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", + "etag", "requestId", - "updateMask", "validateOnly", ) ) - & set(("backup",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): +def test_delete_backup_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -16319,13 +17175,13 @@ def test_update_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_update_backup" + transports.AlloyDBAdminRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_update_backup" + transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) + pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16340,7 +17196,7 @@ def test_update_backup_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = service.UpdateBackupRequest() + request = service.DeleteBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16348,7 +17204,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_backup( + client.delete_backup( request, metadata=[ ("key", "val"), @@ -16360,8 +17216,8 @@ def test_update_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_backup_rest_bad_request( - transport: str = "rest", request_type=service.UpdateBackupRequest +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=service.DeleteBackupRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16369,9 +17225,7 @@ def test_update_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16383,10 +17237,10 @@ def test_update_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_backup(request) + client.delete_backup(request) -def test_update_backup_rest_flattened(): +def test_delete_backup_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16398,14 +17252,11 @@ def test_update_backup_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "backup": {"name": "projects/sample1/locations/sample2/backups/sample3"} - } + sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} # get truthy value for each flattened field mock_args = dict( - backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -16416,20 +17267,20 @@ def test_update_backup_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_backup(**mock_args) + client.delete_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{backup.name=projects/*/locations/*/backups/*}" + "%s/v1alpha/{name=projects/*/locations/*/backups/*}" % client.transport._host, args[1], ) -def test_update_backup_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16438,14 +17289,13 @@ def test_update_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup( - service.UpdateBackupRequest(), - backup=resources.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_backup( + service.DeleteBackupRequest(), + name="name_value", ) -def test_update_backup_rest_error(): +def test_delete_backup_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16454,43 +17304,50 @@ def test_update_backup_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteBackupRequest, + service.ListSupportedDatabaseFlagsRequest, dict, ], ) -def test_delete_backup_rest(request_type): +def test_list_supported_database_flags_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSupportedDatabaseFlagsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_backup(request) + response = client.list_supported_database_flags(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupRequest): +def test_list_supported_database_flags_rest_required_fields( + request_type=service.ListSupportedDatabaseFlagsRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16505,29 +17362,28 @@ def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).delete_backup._get_unset_required_fields(jsonified_request) + ).list_supported_database_flags._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "etag", - "request_id", - "validate_only", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16536,7 +17392,7 @@ def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSupportedDatabaseFlagsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16548,45 +17404,49 @@ def test_delete_backup_rest_required_fields(request_type=service.DeleteBackupReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_backup(request) + response = client.list_supported_database_flags(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_rest_unset_required_fields(): +def test_list_supported_database_flags_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup._get_unset_required_fields({}) + unset_fields = transport.list_supported_database_flags._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( set( ( - "etag", - "requestId", - "validateOnly", + "pageSize", + "pageToken", ) ) - & set(("name",)) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): +def test_list_supported_database_flags_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16599,15 +17459,15 @@ def test_delete_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_delete_backup" + transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" + transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) + pb_message = service.ListSupportedDatabaseFlagsRequest.pb( + service.ListSupportedDatabaseFlagsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16618,19 +17478,19 @@ def test_delete_backup_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = service.ListSupportedDatabaseFlagsResponse.to_json( + service.ListSupportedDatabaseFlagsResponse() ) - request = service.DeleteBackupRequest() + request = service.ListSupportedDatabaseFlagsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = service.ListSupportedDatabaseFlagsResponse() - client.delete_backup( + client.list_supported_database_flags( request, metadata=[ ("key", "val"), @@ -16642,8 +17502,8 @@ def test_delete_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_backup_rest_bad_request( - transport: str = "rest", request_type=service.DeleteBackupRequest +def test_list_supported_database_flags_rest_bad_request( + transport: str = "rest", request_type=service.ListSupportedDatabaseFlagsRequest ): client = AlloyDBAdminClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -16651,7 +17511,7 @@ def test_delete_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16663,10 +17523,10 @@ def test_delete_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_backup(request) + client.list_supported_database_flags(request) -def test_delete_backup_rest_flattened(): +def test_list_supported_database_flags_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16675,38 +17535,40 @@ def test_delete_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = service.ListSupportedDatabaseFlagsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/backups/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_backup(**mock_args) + client.list_supported_database_flags(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/backups/*}" + "%s/v1alpha/{parent=projects/*/locations/*}/supportedDatabaseFlags" % client.transport._host, args[1], ) -def test_delete_backup_rest_flattened_error(transport: str = "rest"): +def test_list_supported_database_flags_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16715,60 +17577,121 @@ def test_delete_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_backup( - service.DeleteBackupRequest(), - name="name_value", + client.list_supported_database_flags( + service.ListSupportedDatabaseFlagsRequest(), + parent="parent_value", ) -def test_delete_backup_rest_error(): +def test_list_supported_database_flags_rest_pager(transport: str = "rest"): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + next_page_token="abc", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[], + next_page_token="def", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + ], + next_page_token="ghi", + ), + service.ListSupportedDatabaseFlagsResponse( + supported_database_flags=[ + resources.SupportedDatabaseFlag(), + resources.SupportedDatabaseFlag(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + service.ListSupportedDatabaseFlagsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + 
pager = client.list_supported_database_flags(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) + + pages = list(client.list_supported_database_flags(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListSupportedDatabaseFlagsRequest, + service.GenerateClientCertificateRequest, dict, ], ) -def test_list_supported_database_flags_rest(request_type): +def test_generate_client_certificate_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListSupportedDatabaseFlagsResponse( - next_page_token="next_page_token_value", + return_value = service.GenerateClientCertificateResponse( + pem_certificate="pem_certificate_value", + pem_certificate_chain=["pem_certificate_chain_value"], + ca_cert="ca_cert_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + return_value = service.GenerateClientCertificateResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_supported_database_flags(request) + response = client.generate_client_certificate(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSupportedDatabaseFlagsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, service.GenerateClientCertificateResponse) + assert response.pem_certificate == "pem_certificate_value" + assert response.pem_certificate_chain == ["pem_certificate_chain_value"] + assert response.ca_cert == "ca_cert_value" -def test_list_supported_database_flags_rest_required_fields( - request_type=service.ListSupportedDatabaseFlagsRequest, +def test_generate_client_certificate_rest_required_fields( + request_type=service.GenerateClientCertificateRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -16788,7 +17711,7 @@ def test_list_supported_database_flags_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_supported_database_flags._get_unset_required_fields(jsonified_request) + ).generate_client_certificate._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with 
default values are now present @@ -16797,14 +17720,7 @@ def test_list_supported_database_flags_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_supported_database_flags._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).generate_client_certificate._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16818,7 +17734,7 @@ def test_list_supported_database_flags_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListSupportedDatabaseFlagsResponse() + return_value = service.GenerateClientCertificateResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16830,49 +17746,40 @@ def test_list_supported_database_flags_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + return_value = service.GenerateClientCertificateResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_supported_database_flags(request) + response = client.generate_client_certificate(request) expected_params = [("$alt", 
"json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_supported_database_flags_rest_unset_required_fields(): +def test_generate_client_certificate_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_supported_database_flags._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.generate_client_certificate._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_supported_database_flags_rest_interceptors(null_interceptor): +def test_generate_client_certificate_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16885,14 +17792,14 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" + transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" + transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListSupportedDatabaseFlagsRequest.pb( - service.ListSupportedDatabaseFlagsRequest() + pb_message = service.GenerateClientCertificateRequest.pb( + service.GenerateClientCertificateRequest() ) transcode.return_value = { "method": "post", @@ -16904,19 +17811,19 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): req.return_value = 
Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListSupportedDatabaseFlagsResponse.to_json( - service.ListSupportedDatabaseFlagsResponse() + req.return_value._content = service.GenerateClientCertificateResponse.to_json( + service.GenerateClientCertificateResponse() ) - request = service.ListSupportedDatabaseFlagsRequest() + request = service.GenerateClientCertificateRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListSupportedDatabaseFlagsResponse() + post.return_value = service.GenerateClientCertificateResponse() - client.list_supported_database_flags( + client.generate_client_certificate( request, metadata=[ ("key", "val"), @@ -16928,8 +17835,8 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_supported_database_flags_rest_bad_request( - transport: str = "rest", request_type=service.ListSupportedDatabaseFlagsRequest +def test_generate_client_certificate_rest_bad_request( + transport: str = "rest", request_type=service.GenerateClientCertificateRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16937,7 +17844,7 @@ def test_list_supported_database_flags_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16949,10 +17856,10 @@ def test_list_supported_database_flags_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_supported_database_flags(request) + client.generate_client_certificate(request) -def test_list_supported_database_flags_rest_flattened(): +def test_generate_client_certificate_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16961,10 +17868,12 @@ def test_list_supported_database_flags_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListSupportedDatabaseFlagsResponse() + return_value = service.GenerateClientCertificateResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -16976,25 +17885,25 @@ def test_list_supported_database_flags_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListSupportedDatabaseFlagsResponse.pb(return_value) + return_value = service.GenerateClientCertificateResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_supported_database_flags(**mock_args) + client.generate_client_certificate(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/supportedDatabaseFlags" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}:generateClientCertificate" % client.transport._host, args[1], ) -def test_list_supported_database_flags_rest_flattened_error(transport: str = "rest"): +def test_generate_client_certificate_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17003,121 +17912,70 @@ def test_list_supported_database_flags_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_supported_database_flags( - service.ListSupportedDatabaseFlagsRequest(), + client.generate_client_certificate( + service.GenerateClientCertificateRequest(), parent="parent_value", ) -def test_list_supported_database_flags_rest_pager(transport: str = "rest"): +def test_generate_client_certificate_rest_error(): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - next_page_token="abc", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[], - next_page_token="def", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - ], - next_page_token="ghi", - ), - service.ListSupportedDatabaseFlagsResponse( - supported_database_flags=[ - resources.SupportedDatabaseFlag(), - resources.SupportedDatabaseFlag(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - service.ListSupportedDatabaseFlagsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_supported_database_flags(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.SupportedDatabaseFlag) for i in results) - - pages = list(client.list_supported_database_flags(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GenerateClientCertificateRequest, + service.GetConnectionInfoRequest, dict, ], ) -def test_generate_client_certificate_rest(request_type): +def test_get_connection_info_rest(request_type): client = AlloyDBAdminClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.GenerateClientCertificateResponse( - pem_certificate="pem_certificate_value", + return_value = resources.ConnectionInfo( + name="name_value", + ip_address="ip_address_value", + public_ip_address="public_ip_address_value", pem_certificate_chain=["pem_certificate_chain_value"], - ca_cert="ca_cert_value", + instance_uid="instance_uid_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.GenerateClientCertificateResponse.pb(return_value) + return_value = resources.ConnectionInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.generate_client_certificate(request) + response = client.get_connection_info(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, service.GenerateClientCertificateResponse) - assert response.pem_certificate == "pem_certificate_value" + assert isinstance(response, resources.ConnectionInfo) + assert response.name == "name_value" + assert response.ip_address == "ip_address_value" + assert response.public_ip_address == "public_ip_address_value" assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.ca_cert == "ca_cert_value" + assert response.instance_uid == "instance_uid_value" -def test_generate_client_certificate_rest_required_fields( - request_type=service.GenerateClientCertificateRequest, +def test_get_connection_info_rest_required_fields( + request_type=service.GetConnectionInfoRequest, ): transport_class = transports.AlloyDBAdminRestTransport @@ -17137,7 +17995,7 @@ def test_generate_client_certificate_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_client_certificate._get_unset_required_fields(jsonified_request) + ).get_connection_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17146,7 +18004,9 @@ def test_generate_client_certificate_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_client_certificate._get_unset_required_fields(jsonified_request) + ).get_connection_info._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17160,7 +18020,7 @@ def test_generate_client_certificate_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = service.GenerateClientCertificateResponse() + return_value = resources.ConnectionInfo() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17172,40 +18032,39 @@ def test_generate_client_certificate_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.GenerateClientCertificateResponse.pb(return_value) + return_value = resources.ConnectionInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.generate_client_certificate(request) + response = client.get_connection_info(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_generate_client_certificate_rest_unset_required_fields(): +def test_get_connection_info_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.generate_client_certificate._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.get_connection_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_client_certificate_rest_interceptors(null_interceptor): +def test_get_connection_info_rest_interceptors(null_interceptor): transport = 
transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17218,14 +18077,14 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" + transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" + transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GenerateClientCertificateRequest.pb( - service.GenerateClientCertificateRequest() + pb_message = service.GetConnectionInfoRequest.pb( + service.GetConnectionInfoRequest() ) transcode.return_value = { "method": "post", @@ -17237,19 +18096,19 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.GenerateClientCertificateResponse.to_json( - service.GenerateClientCertificateResponse() + req.return_value._content = resources.ConnectionInfo.to_json( + resources.ConnectionInfo() ) - request = service.GenerateClientCertificateRequest() + request = service.GetConnectionInfoRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.GenerateClientCertificateResponse() + post.return_value = resources.ConnectionInfo() - client.generate_client_certificate( + client.get_connection_info( request, metadata=[ ("key", "val"), @@ -17261,8 +18120,8 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): post.assert_called_once() -def test_generate_client_certificate_rest_bad_request( - transport: str = "rest", 
request_type=service.GenerateClientCertificateRequest +def test_get_connection_info_rest_bad_request( + transport: str = "rest", request_type=service.GetConnectionInfoRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17270,7 +18129,9 @@ def test_generate_client_certificate_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17282,10 +18143,10 @@ def test_generate_client_certificate_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.generate_client_certificate(request) + client.get_connection_info(request) -def test_generate_client_certificate_rest_flattened(): +def test_get_connection_info_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17294,11 +18155,11 @@ def test_generate_client_certificate_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.GenerateClientCertificateResponse() + return_value = resources.ConnectionInfo() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" + "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" } # get truthy value for each flattened field @@ -17311,25 +18172,25 @@ def test_generate_client_certificate_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.GenerateClientCertificateResponse.pb(return_value) + return_value = resources.ConnectionInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.generate_client_certificate(**mock_args) + client.get_connection_info(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}:generateClientCertificate" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*/instances/*}/connectionInfo" % client.transport._host, args[1], ) -def test_generate_client_certificate_rest_flattened_error(transport: str = "rest"): +def test_get_connection_info_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17338,13 +18199,13 @@ def test_generate_client_certificate_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.generate_client_certificate( - service.GenerateClientCertificateRequest(), + client.get_connection_info( + service.GetConnectionInfoRequest(), parent="parent_value", ) -def test_generate_client_certificate_rest_error(): +def test_get_connection_info_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17353,54 +18214,46 @@ def test_generate_client_certificate_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.GetConnectionInfoRequest, + service.ListUsersRequest, dict, ], ) -def test_get_connection_info_rest(request_type): +def test_list_users_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.ConnectionInfo( - name="name_value", - ip_address="ip_address_value", - pem_certificate_chain=["pem_certificate_chain_value"], - instance_uid="instance_uid_value", + return_value = service.ListUsersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConnectionInfo.pb(return_value) + return_value = service.ListUsersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_connection_info(request) + response = client.list_users(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.ConnectionInfo) - assert response.name == "name_value" - assert response.ip_address == "ip_address_value" - assert response.pem_certificate_chain == ["pem_certificate_chain_value"] - assert response.instance_uid == "instance_uid_value" + assert isinstance(response, pagers.ListUsersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_connection_info_rest_required_fields( - request_type=service.GetConnectionInfoRequest, -): +def test_list_users_rest_required_fields(request_type=service.ListUsersRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} @@ -17419,7 +18272,7 @@ def test_get_connection_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_connection_info._get_unset_required_fields(jsonified_request) + ).list_users._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17428,9 +18281,16 @@ 
def test_get_connection_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_connection_info._get_unset_required_fields(jsonified_request) + ).list_users._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17444,7 +18304,7 @@ def test_get_connection_info_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.ConnectionInfo() + return_value = service.ListUsersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17465,30 +18325,40 @@ def test_get_connection_info_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConnectionInfo.pb(return_value) + return_value = service.ListUsersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_connection_info(request) + response = client.list_users(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_connection_info_rest_unset_required_fields(): +def test_list_users_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_connection_info._get_unset_required_fields({}) - assert 
set(unset_fields) == (set(("requestId",)) & set(("parent",))) + unset_fields = transport.list_users._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_connection_info_rest_interceptors(null_interceptor): +def test_list_users_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17501,15 +18371,13 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" + transports.AlloyDBAdminRestInterceptor, "post_list_users" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" + transports.AlloyDBAdminRestInterceptor, "pre_list_users" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetConnectionInfoRequest.pb( - service.GetConnectionInfoRequest() - ) + pb_message = service.ListUsersRequest.pb(service.ListUsersRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17520,19 +18388,19 @@ def test_get_connection_info_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.ConnectionInfo.to_json( - resources.ConnectionInfo() + req.return_value._content = service.ListUsersResponse.to_json( + service.ListUsersResponse() ) - request = service.GetConnectionInfoRequest() + request = service.ListUsersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.ConnectionInfo() + post.return_value = service.ListUsersResponse() - 
client.get_connection_info( + client.list_users( request, metadata=[ ("key", "val"), @@ -17544,8 +18412,8 @@ def test_get_connection_info_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_connection_info_rest_bad_request( - transport: str = "rest", request_type=service.GetConnectionInfoRequest +def test_list_users_rest_bad_request( + transport: str = "rest", request_type=service.ListUsersRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17553,9 +18421,7 @@ def test_get_connection_info_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17567,10 +18433,10 @@ def test_get_connection_info_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_connection_info(request) + client.list_users(request) -def test_get_connection_info_rest_flattened(): +def test_list_users_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17579,11 +18445,11 @@ def test_get_connection_info_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.ConnectionInfo() + return_value = service.ListUsersResponse() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3/instances/sample4" + "parent": "projects/sample1/locations/sample2/clusters/sample3" } # get truthy value for each flattened field @@ -17596,25 +18462,25 @@ def test_get_connection_info_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.ConnectionInfo.pb(return_value) + return_value = service.ListUsersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_connection_info(**mock_args) + client.list_users(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*/instances/*}/connectionInfo" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/users" % client.transport._host, args[1], ) -def test_get_connection_info_rest_flattened_error(transport: str = "rest"): +def test_list_users_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17623,65 +18489,128 @@ def test_get_connection_info_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_connection_info( - service.GetConnectionInfoRequest(), + client.list_users( + service.ListUsersRequest(), parent="parent_value", ) -def test_get_connection_info_rest_error(): +def test_list_users_rest_pager(transport: str = "rest"): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + resources.User(), + ], + next_page_token="abc", + ), + service.ListUsersResponse( + users=[], + next_page_token="def", + ), + service.ListUsersResponse( + users=[ + resources.User(), + ], + next_page_token="ghi", + ), + service.ListUsersResponse( + users=[ + resources.User(), + resources.User(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListUsersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_users(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.User) for i in results) + + pages = list(client.list_users(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - service.ListUsersRequest, + service.GetUserRequest, dict, ], ) -def test_list_users_rest(request_type): +def test_get_user_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListUsersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = resources.User( + name="name_value", + password="password_value", + database_roles=["database_roles_value"], + user_type=resources.User.UserType.ALLOYDB_BUILT_IN, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListUsersResponse.pb(return_value) + return_value = resources.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_users(request) + response = client.get_user(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListUsersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, resources.User) + assert response.name == "name_value" + assert response.password == "password_value" + assert response.database_roles == ["database_roles_value"] + assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN -def test_list_users_rest_required_fields(request_type=service.ListUsersRequest): +def test_get_user_rest_required_fields(request_type=service.GetUserRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17696,30 +18625,21 @@ def test_list_users_rest_required_fields(request_type=service.ListUsersRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_users._get_unset_required_fields(jsonified_request) + ).get_user._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_users._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_user._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17728,7 +18648,7 @@ def test_list_users_rest_required_fields(request_type=service.ListUsersRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListUsersResponse() + return_value = resources.User() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17749,40 +18669,30 @@ def test_list_users_rest_required_fields(request_type=service.ListUsersRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListUsersResponse.pb(return_value) + return_value = resources.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_users(request) + response = client.get_user(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_users_rest_unset_required_fields(): +def test_get_user_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_users._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - 
"pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_user._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_users_rest_interceptors(null_interceptor): +def test_get_user_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17795,13 +18705,13 @@ def test_list_users_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_users" + transports.AlloyDBAdminRestInterceptor, "post_get_user" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_users" + transports.AlloyDBAdminRestInterceptor, "pre_get_user" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListUsersRequest.pb(service.ListUsersRequest()) + pb_message = service.GetUserRequest.pb(service.GetUserRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17812,19 +18722,17 @@ def test_list_users_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListUsersResponse.to_json( - service.ListUsersResponse() - ) + req.return_value._content = resources.User.to_json(resources.User()) - request = service.ListUsersRequest() + request = service.GetUserRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListUsersResponse() + post.return_value = resources.User() - client.list_users( + client.get_user( request, metadata=[ ("key", "val"), @@ -17836,8 +18744,8 @@ def test_list_users_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_list_users_rest_bad_request( - transport: str = "rest", request_type=service.ListUsersRequest +def test_get_user_rest_bad_request( + transport: str = "rest", request_type=service.GetUserRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17845,7 +18753,9 @@ def test_list_users_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17857,10 +18767,10 @@ def test_list_users_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_users(request) + client.get_user(request) -def test_list_users_rest_flattened(): +def test_get_user_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17869,16 +18779,16 @@ def test_list_users_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListUsersResponse() + return_value = resources.User() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -17886,25 +18796,25 @@ def test_list_users_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListUsersResponse.pb(return_value) + return_value = resources.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_users(**mock_args) + client.get_user(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/users" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/users/*}" % client.transport._host, args[1], ) -def test_list_users_rest_flattened_error(transport: str = "rest"): +def test_get_user_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17913,92 +18823,106 @@ def test_list_users_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_users( - service.ListUsersRequest(), - parent="parent_value", + client.get_user( + service.GetUserRequest(), + name="name_value", ) -def test_list_users_rest_pager(transport: str = "rest"): +def test_get_user_rest_error(): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - resources.User(), - ], - next_page_token="abc", - ), - service.ListUsersResponse( - users=[], - next_page_token="def", - ), - service.ListUsersResponse( - users=[ - resources.User(), - ], - next_page_token="ghi", - ), - service.ListUsersResponse( - users=[ - resources.User(), - resources.User(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListUsersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" - } - - pager = client.list_users(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.User) for i in results) - - pages = list(client.list_users(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert 
page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GetUserRequest, + service.CreateUserRequest, dict, ], ) -def test_get_user_rest(request_type): +def test_create_user_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init["user"] = { + "name": "name_value", + "password": "password_value", + "database_roles": ["database_roles_value1", "database_roles_value2"], + "user_type": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateUserRequest.meta.fields["user"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["user"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["user"][field])): + del request_init["user"][field][i][subfield] + else: + del 
request_init["user"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18020,7 +18944,7 @@ def test_get_user_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_user(request) + response = client.create_user(request) # Establish that the response is the type that we expect. assert isinstance(response, resources.User) @@ -18030,11 +18954,12 @@ def test_get_user_rest(request_type): assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN -def test_get_user_rest_required_fields(request_type=service.GetUserRequest): +def test_create_user_rest_required_fields(request_type=service.CreateUserRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["user_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18046,24 +18971,38 @@ def test_get_user_rest_required_fields(request_type=service.GetUserRequest): ) # verify fields with default values are dropped + assert "userId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_user._get_unset_required_fields(jsonified_request) + ).create_user._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "userId" in jsonified_request + assert jsonified_request["userId"] == request_init["user_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["userId"] = "user_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_user._get_unset_required_fields(jsonified_request) + ).create_user._get_unset_required_fields(jsonified_request) + # 
Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "user_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "userId" in jsonified_request + assert jsonified_request["userId"] == "user_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18084,9 +19023,10 @@ def test_get_user_rest_required_fields(request_type=service.GetUserRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -18099,24 +19039,45 @@ def test_get_user_rest_required_fields(request_type=service.GetUserRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_user(request) + response = client.create_user(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "userId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_user_rest_unset_required_fields(): +def test_create_user_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_user._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_user._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "userId", + "validateOnly", + ) + ) + & 
set( + ( + "parent", + "userId", + "user", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_user_rest_interceptors(null_interceptor): +def test_create_user_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18129,13 +19090,13 @@ def test_get_user_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_get_user" + transports.AlloyDBAdminRestInterceptor, "post_create_user" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_get_user" + transports.AlloyDBAdminRestInterceptor, "pre_create_user" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetUserRequest.pb(service.GetUserRequest()) + pb_message = service.CreateUserRequest.pb(service.CreateUserRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18148,7 +19109,7 @@ def test_get_user_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = resources.User.to_json(resources.User()) - request = service.GetUserRequest() + request = service.CreateUserRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18156,7 +19117,7 @@ def test_get_user_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = resources.User() - client.get_user( + client.create_user( request, metadata=[ ("key", "val"), @@ -18168,8 +19129,8 @@ def test_get_user_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_user_rest_bad_request( - transport: str = "rest", request_type=service.GetUserRequest +def test_create_user_rest_bad_request( + transport: str = "rest", request_type=service.CreateUserRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-18177,9 +19138,7 @@ def test_get_user_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18191,10 +19150,10 @@ def test_get_user_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_user(request) + client.create_user(request) -def test_get_user_rest_flattened(): +def test_create_user_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18207,12 +19166,14 @@ def test_get_user_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + "parent": "projects/sample1/locations/sample2/clusters/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + user=resources.User(name="name_value"), + user_id="user_id_value", ) mock_args.update(sample_request) @@ -18225,20 +19186,20 @@ def test_get_user_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_user(**mock_args) + client.create_user(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/users/*}" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/users" % client.transport._host, args[1], ) -def test_get_user_rest_flattened_error(transport: str = "rest"): +def test_create_user_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18247,13 +19208,15 @@ def test_get_user_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_user( - service.GetUserRequest(), - name="name_value", + client.create_user( + service.CreateUserRequest(), + parent="parent_value", + user=resources.User(name="name_value"), + user_id="user_id_value", ) -def test_get_user_rest_error(): +def test_create_user_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18262,20 +19225,24 @@ def test_get_user_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.CreateUserRequest, + service.UpdateUserRequest, dict, ], ) -def test_create_user_rest(request_type): +def test_update_user_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "user": { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } + } request_init["user"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4", "password": "password_value", "database_roles": ["database_roles_value1", "database_roles_value2"], "user_type": 1, @@ -18285,7 +19252,7 @@ def 
test_create_user_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateUserRequest.meta.fields["user"] + test_field = service.UpdateUserRequest.meta.fields["user"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18368,7 +19335,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_user(request) + response = client.update_user(request) # Establish that the response is the type that we expect. assert isinstance(response, resources.User) @@ -18378,12 +19345,10 @@ def get_message_fields(field): assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN -def test_create_user_rest_required_fields(request_type=service.CreateUserRequest): +def test_update_user_rest_required_fields(request_type=service.UpdateUserRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["user_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18395,38 +19360,29 @@ def test_create_user_rest_required_fields(request_type=service.CreateUserRequest ) # verify fields with default values are dropped - assert "userId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_user._get_unset_required_fields(jsonified_request) + ).update_user._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "userId" in jsonified_request - assert jsonified_request["userId"] == request_init["user_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["userId"] = "user_id_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).create_user._get_unset_required_fields(jsonified_request) + ).update_user._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( + "allow_missing", "request_id", - "user_id", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "userId" in jsonified_request - assert jsonified_request["userId"] == "user_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18447,7 +19403,7 @@ def test_create_user_rest_required_fields(request_type=service.CreateUserRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18463,45 +19419,34 @@ def test_create_user_rest_required_fields(request_type=service.CreateUserRequest response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_user(request) + response = client.update_user(request) - expected_params = [ - ( - "userId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_user_rest_unset_required_fields(): +def test_update_user_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_user._get_unset_required_fields({}) + unset_fields = transport.update_user._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( + "allowMissing", "requestId", - "userId", + "updateMask", 
"validateOnly", ) ) - & set( - ( - "parent", - "userId", - "user", - ) - ) + & set(("user",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_user_rest_interceptors(null_interceptor): +def test_update_user_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18514,13 +19459,13 @@ def test_create_user_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_create_user" + transports.AlloyDBAdminRestInterceptor, "post_update_user" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_create_user" + transports.AlloyDBAdminRestInterceptor, "pre_update_user" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateUserRequest.pb(service.CreateUserRequest()) + pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18533,7 +19478,7 @@ def test_create_user_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = resources.User.to_json(resources.User()) - request = service.CreateUserRequest() + request = service.UpdateUserRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18541,7 +19486,7 @@ def test_create_user_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = resources.User() - client.create_user( + client.update_user( request, metadata=[ ("key", "val"), @@ -18553,8 +19498,8 @@ def test_create_user_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_user_rest_bad_request( - transport: str = "rest", request_type=service.CreateUserRequest +def test_update_user_rest_bad_request( + transport: str = "rest", request_type=service.UpdateUserRequest ): 
client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18562,7 +19507,11 @@ def test_create_user_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + request_init = { + "user": { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18574,10 +19523,10 @@ def test_create_user_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_user(request) + client.update_user(request) -def test_create_user_rest_flattened(): +def test_update_user_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18590,14 +19539,15 @@ def test_create_user_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" + "user": { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", user=resources.User(name="name_value"), - user_id="user_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -18610,20 +19560,20 @@ def test_create_user_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_user(**mock_args) + client.update_user(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/users" + "%s/v1alpha/{user.name=projects/*/locations/*/clusters/*/users/*}" % client.transport._host, args[1], ) -def test_create_user_rest_flattened_error(transport: str = "rest"): +def test_update_user_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18632,15 +19582,14 @@ def test_create_user_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_user( - service.CreateUserRequest(), - parent="parent_value", + client.update_user( + service.UpdateUserRequest(), user=resources.User(name="name_value"), - user_id="user_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_user_rest_error(): +def test_update_user_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18649,11 +19598,11 @@ def test_create_user_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.UpdateUserRequest, + service.DeleteUserRequest, dict, ], ) -def test_update_user_rest(request_type): +def test_delete_user_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18661,118 +19610,33 @@ def test_update_user_rest(request_type): # send a request that will satisfy transcoding request_init = { - "user": { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } - } - request_init["user"] = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4", - "password": "password_value", - "database_roles": ["database_roles_value1", "database_roles_value2"], - "user_type": 1, + "name": 
"projects/sample1/locations/sample2/clusters/sample3/users/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateUserRequest.meta.fields["user"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["user"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - 
"subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["user"][field])): - del request_init["user"][field][i][subfield] - else: - del request_init["user"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.User( - name="name_value", - password="password_value", - database_roles=["database_roles_value"], - user_type=resources.User.UserType.ALLOYDB_BUILT_IN, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.User.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_user(request) + response = client.delete_user(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.User) - assert response.name == "name_value" - assert response.password == "password_value" - assert response.database_roles == ["database_roles_value"] - assert response.user_type == resources.User.UserType.ALLOYDB_BUILT_IN + assert response is None -def test_update_user_rest_required_fields(request_type=service.UpdateUserRequest): +def test_delete_user_rest_required_fields(request_type=service.DeleteUserRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18787,26 +19651,28 @@ def test_update_user_rest_required_fields(request_type=service.UpdateUserRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_user._get_unset_required_fields(jsonified_request) + ).delete_user._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_user._get_unset_required_fields(jsonified_request) + ).delete_user._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "allow_missing", "request_id", - "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18815,7 +19681,7 @@ def test_update_user_rest_required_fields(request_type=service.UpdateUserRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = resources.User() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18827,50 +19693,44 @@ def test_update_user_rest_required_fields(request_type=service.UpdateUserRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resources.User.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_user(request) + response = client.delete_user(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_user_rest_unset_required_fields(): +def test_delete_user_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_user._get_unset_required_fields({}) + unset_fields = transport.delete_user._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", "requestId", - "updateMask", "validateOnly", ) ) - & set(("user",)) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_user_rest_interceptors(null_interceptor): +def test_delete_user_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18883,13 +19743,10 @@ def 
test_update_user_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_update_user" - ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_update_user" + transports.AlloyDBAdminRestInterceptor, "pre_delete_user" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) + pb_message = service.DeleteUserRequest.pb(service.DeleteUserRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18900,17 +19757,15 @@ def test_update_user_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = resources.User.to_json(resources.User()) - request = service.UpdateUserRequest() + request = service.DeleteUserRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.User() - client.update_user( + client.delete_user( request, metadata=[ ("key", "val"), @@ -18919,11 +19774,10 @@ def test_update_user_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_user_rest_bad_request( - transport: str = "rest", request_type=service.UpdateUserRequest +def test_delete_user_rest_bad_request( + transport: str = "rest", request_type=service.DeleteUserRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18932,9 +19786,7 @@ def test_update_user_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "user": { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" } request = request_type(**request_init) @@ -18947,10 +19799,10 @@ def 
test_update_user_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_user(request) + client.delete_user(request) -def test_update_user_rest_flattened(): +def test_delete_user_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18959,45 +19811,40 @@ def test_update_user_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.User() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "user": { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" } # get truthy value for each flattened field mock_args = dict( - user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.User.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_user(**mock_args) + client.delete_user(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{user.name=projects/*/locations/*/clusters/*/users/*}" + "%s/v1alpha/{name=projects/*/locations/*/clusters/*/users/*}" % client.transport._host, args[1], ) -def test_update_user_rest_flattened_error(transport: str = "rest"): +def test_delete_user_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19006,14 +19853,13 @@ def test_update_user_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_user( - service.UpdateUserRequest(), - user=resources.User(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_user( + service.DeleteUserRequest(), + name="name_value", ) -def test_update_user_rest_error(): +def test_delete_user_rest_error(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19022,45 +19868,48 @@ def test_update_user_rest_error(): @pytest.mark.parametrize( "request_type", [ - service.DeleteUserRequest, + service.ListDatabasesRequest, dict, ], ) -def test_delete_user_rest(request_type): +def test_list_databases_rest(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_user(request) + response = client.list_databases(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_user_rest_required_fields(request_type=service.DeleteUserRequest): +def test_list_databases_rest_required_fields(request_type=service.ListDatabasesRequest): transport_class = transports.AlloyDBAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19075,28 +19924,29 @@ def test_delete_user_rest_required_fields(request_type=service.DeleteUserRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_user._get_unset_required_fields(jsonified_request) + ).list_databases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_user._get_unset_required_fields(jsonified_request) + ).list_databases._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "request_id", - "validate_only", + "filter", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19105,7 +19955,7 @@ def test_delete_user_rest_required_fields(request_type=service.DeleteUserRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = service.ListDatabasesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19117,44 +19967,48 @@ def test_delete_user_rest_required_fields(request_type=service.DeleteUserRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_user(request) + response = client.list_databases(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_user_rest_unset_required_fields(): +def test_list_databases_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_user._get_unset_required_fields({}) + unset_fields = transport.list_databases._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "validateOnly", + "filter", + "pageSize", + "pageToken", ) ) - & set(("name",)) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_user_rest_interceptors(null_interceptor): +def test_list_databases_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19167,10 +20021,13 @@ def test_delete_user_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_user" + transports.AlloyDBAdminRestInterceptor, "post_list_databases" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() - pb_message = service.DeleteUserRequest.pb(service.DeleteUserRequest()) + post.assert_not_called() + pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19181,15 +20038,19 @@ def test_delete_user_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = service.ListDatabasesResponse.to_json( + service.ListDatabasesResponse() + ) - request = service.DeleteUserRequest() + request = service.ListDatabasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = service.ListDatabasesResponse() - client.delete_user( + client.list_databases( request, metadata=[ ("key", "val"), @@ -19198,10 +20059,11 @@ def 
test_delete_user_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_user_rest_bad_request( - transport: str = "rest", request_type=service.DeleteUserRequest +def test_list_databases_rest_bad_request( + transport: str = "rest", request_type=service.ListDatabasesRequest ): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19209,9 +20071,7 @@ def test_delete_user_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19223,10 +20083,10 @@ def test_delete_user_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_user(request) + client.list_databases(request) -def test_delete_user_rest_flattened(): +def test_list_databases_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19235,40 +20095,42 @@ def test_delete_user_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = service.ListDatabasesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + "parent": "projects/sample1/locations/sample2/clusters/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_user(**mock_args) + client.list_databases(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/clusters/*/users/*}" + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/databases" % client.transport._host, args[1], ) -def test_delete_user_rest_flattened_error(transport: str = "rest"): +def test_list_databases_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19277,17 +20139,74 @@ def test_delete_user_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_user( - service.DeleteUserRequest(), - name="name_value", + client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", ) -def test_delete_user_rest_error(): +def test_list_databases_rest_pager(transport: str = "rest"): client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Database) for i in results) + + pages = 
list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -19459,6 +20378,7 @@ def test_alloy_db_admin_base_transport(): "create_user", "update_user", "delete_user", + "list_databases", "get_location", "list_locations", "get_operation", @@ -19835,6 +20755,9 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.delete_user._session session2 = client2.transport.delete_user._session assert session1 != session2 + session1 = client1.transport.list_databases._session + session2 = client2.transport.list_databases._session + assert session1 != session2 def test_alloy_db_admin_grpc_transport_channel(): @@ -20106,11 +21029,40 @@ def test_parse_crypto_key_version_path(): assert expected == actual -def test_instance_path(): +def test_database_path(): project = "cuttlefish" location = "mussel" cluster = "winkle" - instance = "nautilus" + database = "nautilus" + expected = "projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}".format( + project=project, + location=location, + cluster=cluster, + database=database, + ) + actual = AlloyDBAdminClient.database_path(project, location, cluster, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "scallop", + "location": "abalone", + "cluster": "squid", + "database": "clam", + } + path = AlloyDBAdminClient.database_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_database_path(path) + assert expected == actual + + +def test_instance_path(): + project = "whelk" + location = "octopus" + cluster = "oyster" + instance = "nudibranch" expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( project=project, location=location, @@ -20123,10 +21075,10 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "scallop", - "location": "abalone", - "cluster": "squid", - "instance": "clam", + "project": "cuttlefish", + "location": "mussel", + "cluster": "winkle", + "instance": "nautilus", } path = AlloyDBAdminClient.instance_path(**expected) @@ -20136,8 +21088,8 @@ def test_parse_instance_path(): def test_network_path(): - project = "whelk" - network = "octopus" + project = "scallop" + network = "abalone" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -20148,8 +21100,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "oyster", - "network": "nudibranch", + "project": "squid", + "network": "clam", } path = AlloyDBAdminClient.network_path(**expected) @@ -20159,9 +21111,9 @@ def test_parse_network_path(): def test_supported_database_flag_path(): - project = "cuttlefish" - location = "mussel" - flag = "winkle" + project = "whelk" + location = "octopus" + flag = "oyster" expected = "projects/{project}/locations/{location}/flags/{flag}".format( project=project, location=location, @@ -20173,9 +21125,9 @@ def test_supported_database_flag_path(): def test_parse_supported_database_flag_path(): expected = { - "project": "nautilus", - "location": "scallop", - "flag": "abalone", + "project": "nudibranch", + "location": "cuttlefish", + "flag": "mussel", } path = AlloyDBAdminClient.supported_database_flag_path(**expected) @@ -20185,10 +21137,10 @@ def test_parse_supported_database_flag_path(): def test_user_path(): - project = "squid" - location = "clam" 
- cluster = "whelk" - user = "octopus" + project = "winkle" + location = "nautilus" + cluster = "scallop" + user = "abalone" expected = "projects/{project}/locations/{location}/clusters/{cluster}/users/{user}".format( project=project, location=location, @@ -20201,10 +21153,10 @@ def test_user_path(): def test_parse_user_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "cluster": "cuttlefish", - "user": "mussel", + "project": "squid", + "location": "clam", + "cluster": "whelk", + "user": "octopus", } path = AlloyDBAdminClient.user_path(**expected) @@ -20214,7 +21166,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -20224,7 +21176,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = AlloyDBAdminClient.common_billing_account_path(**expected) @@ -20234,7 +21186,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -20244,7 +21196,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = AlloyDBAdminClient.common_folder_path(**expected) @@ -20254,7 +21206,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -20264,7 +21216,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = AlloyDBAdminClient.common_organization_path(**expected) @@ -20274,7 +21226,7 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -20284,7 +21236,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = AlloyDBAdminClient.common_project_path(**expected) @@ -20294,8 +21246,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -20306,8 +21258,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = AlloyDBAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index c2e34ee06b17..004a0d3c918e 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,32 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.22.0...google-cloud-asset-v3.23.0) (2024-01-04) + + +### Features + +* Added new resource references to fields in AnalyzeMoveRequest ([599e175](https://github.com/googleapis/google-cloud-python/commit/599e1754f44f934060c935f0af4d88412edda582)) + + +### Documentation + +* Updated comments ([599e175](https://github.com/googleapis/google-cloud-python/commit/599e1754f44f934060c935f0af4d88412edda582)) + +## [3.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.21.0...google-cloud-asset-v3.22.0) (2023-12-12) + + +### Features + +* added Asset.access_policy, access_level, service_perimeter, org_policy 
([0fc00b8](https://github.com/googleapis/google-cloud-python/commit/0fc00b8514fa29dd183381e5dac8f712a37c2f34)) +* added messages ExportAssetsResponse, BatchGetAssetsHistoryResponse ([0fc00b8](https://github.com/googleapis/google-cloud-python/commit/0fc00b8514fa29dd183381e5dac8f712a37c2f34)) +* added resource definitions to some messages ([0fc00b8](https://github.com/googleapis/google-cloud-python/commit/0fc00b8514fa29dd183381e5dac8f712a37c2f34)) + + +### Documentation + +* updated comments ([0fc00b8](https://github.com/googleapis/google-cloud-python/commit/0fc00b8514fa29dd183381e5dac8f712a37c2f34)) + ## [3.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.20.1...google-cloud-asset-v3.21.0) (2023-12-07) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 3d872171749b..d19a72a3bcfa 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 3d872171749b..d19a72a3bcfa 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py index b1886de65b95..7957c84c6caa 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1229,38 +1229,36 @@ async def sample_search_all_resources(): have a label ``env``. - ``tagKeys:env`` to find Google Cloud resources that have directly attached tags where the - ```TagKey`` `__ - .\ ``namespacedName`` contains ``env``. + ```TagKey.namespacedName`` `__ + contains ``env``. - ``tagValues:prod*`` to find Google Cloud resources that have directly attached tags where the - ```TagValue`` `__ - .\ ``namespacedName`` contains a word prefixed by - ``prod``. + ```TagValue.namespacedName`` `__ + contains a word prefixed by ``prod``. - ``tagValueIds=tagValues/123`` to find Google Cloud resources that have directly attached tags where the - ```TagValue`` `__ - .\ ``name`` is exactly ``tagValues/123``. + ```TagValue.name`` `__ + is exactly ``tagValues/123``. - ``effectiveTagKeys:env`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagKey`` `__ - .\ ``namespacedName`` contains ``env``. + ```TagKey.namespacedName`` `__ + contains ``env``. - ``effectiveTagValues:prod*`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagValue`` `__ - .\ ``namespacedName`` contains a word prefixed by - ``prod``. + ```TagValue.namespacedName`` `__ + contains a word prefixed by ``prod``. 
- ``effectiveTagValueIds=tagValues/123`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagValue`` `__ - .\ ``name`` is exactly ``tagValues/123``. + ```TagValue.name`` `__ + is exactly ``tagValues/123``. - ``kmsKey:key`` to find Google Cloud resources encrypted with a customer-managed encryption key whose name contains ``key`` as a word. This field is - deprecated. Please use the ``kmsKeys`` field to - retrieve Cloud KMS key information. + deprecated. Use the ``kmsKeys`` field to retrieve + Cloud KMS key information. - ``kmsKeys:key`` to find Google Cloud resources encrypted with customer-managed encryption keys whose name contains the word ``key``. @@ -1275,6 +1273,12 @@ async def sample_search_all_resources(): relationships with ``instance-group-1`` in the Compute Engine instance group resource name, for relationship type ``INSTANCE_TO_INSTANCEGROUP``. + - ``sccSecurityMarks.key=value`` to find Cloud + resources that are attached with security marks whose + key is ``key`` and value is ``value``. + - ``sccSecurityMarks.key:*`` to find Cloud resources + that are attached with security marks whose key is + ``key``. - ``state:ACTIVE`` to find Google Cloud resources whose state contains ``ACTIVE`` as a word. - ``NOT state:ACTIVE`` to find Google Cloud resources @@ -1308,7 +1312,7 @@ async def sample_search_all_resources(): Optional. A list of asset types that this request searches for. If empty, it will search all the `searchable asset - types `__. + types `__. Regular expressions are also supported. For example: @@ -2767,13 +2771,17 @@ async def sample_analyze_org_policies(): filter (:class:`str`): The expression to filter [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - The only supported field is - ``consolidated_policy.attached_resource``, and the only - supported operator is ``=``. 
+ Filtering is currently available for bare literal values + and the following fields: - Example: + - consolidated_policy.attached_resource + - consolidated_policy.rules.enforce + + When filtering by a specific field, the only supported + operator is ``=``. For example, filtering by consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return the org policy results of"folders/001". + will return all the Organization Policy results attached + to "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -2928,13 +2936,18 @@ async def sample_analyze_org_policy_governed_containers(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - The expression to filter the governed containers in - result. The only supported field is ``parent``, and the - only supported operator is ``=``. + The expression to filter + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + Filtering is currently available for bare literal values + and the following fields: - Example: + - parent + - consolidated_policy.rules.enforce + + When filtering by a specific field, the only supported + operator is ``=``. For example, filtering by parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all containers under "folders/001". + will return all the containers under "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -3049,7 +3062,7 @@ async def analyze_org_policy_governed_assets( This RPC only returns either resources of types supported by `searchable asset - types `__, + types `__, or IAM policies. .. 
code-block:: python @@ -3107,24 +3120,40 @@ async def sample_analyze_org_policy_governed_assets(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - The expression to filter the governed assets in result. - The only supported fields for governed resources are - ``governed_resource.project`` and - ``governed_resource.folders``. The only supported fields - for governed iam policies are - ``governed_iam_policy.project`` and - ``governed_iam_policy.folders``. The only supported - operator is ``=``. - - Example 1: governed_resource.project="projects/12345678" - filter will return all governed resources under - projects/12345678 including the project ifself, if - applicable. - - Example 2: - governed_iam_policy.folders="folders/12345678" filter - will return all governed iam policies under - folders/12345678, if applicable. + The expression to filter + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. + + For governed resources, filtering is currently available + for bare literal values and the following fields: + + - governed_resource.project + - governed_resource.folders + - consolidated_policy.rules.enforce When filtering by + ``governed_resource.project`` or + ``consolidated_policy.rules.enforce``, the only + supported operator is ``=``. When filtering by + ``governed_resource.folders``, the supported + operators are ``=`` and ``:``. For example, filtering + by ``governed_resource.project="projects/12345678"`` + will return all the governed resources under + "projects/12345678", including the project itself if + applicable. 
+ + For governed IAM policies, filtering is currently + available for bare literal values and the following + fields: + + - governed_iam_policy.project + - governed_iam_policy.folders + - consolidated_policy.rules.enforce When filtering by + ``governed_iam_policy.project`` or + ``consolidated_policy.rules.enforce``, the only + supported operator is ``=``. When filtering by + ``governed_iam_policy.folders``, the supported + operators are ``=`` and ``:``. For example, filtering + by ``governed_iam_policy.folders:"folders/12345678"`` + will return all the governed IAM policies under + "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py index fa28082fbe76..ef8c1f7c668f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1500,38 +1500,36 @@ def sample_search_all_resources(): have a label ``env``. - ``tagKeys:env`` to find Google Cloud resources that have directly attached tags where the - ```TagKey`` `__ - .\ ``namespacedName`` contains ``env``. + ```TagKey.namespacedName`` `__ + contains ``env``. - ``tagValues:prod*`` to find Google Cloud resources that have directly attached tags where the - ```TagValue`` `__ - .\ ``namespacedName`` contains a word prefixed by - ``prod``. + ```TagValue.namespacedName`` `__ + contains a word prefixed by ``prod``. - ``tagValueIds=tagValues/123`` to find Google Cloud resources that have directly attached tags where the - ```TagValue`` `__ - .\ ``name`` is exactly ``tagValues/123``. + ```TagValue.name`` `__ + is exactly ``tagValues/123``. 
- ``effectiveTagKeys:env`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagKey`` `__ - .\ ``namespacedName`` contains ``env``. + ```TagKey.namespacedName`` `__ + contains ``env``. - ``effectiveTagValues:prod*`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagValue`` `__ - .\ ``namespacedName`` contains a word prefixed by - ``prod``. + ```TagValue.namespacedName`` `__ + contains a word prefixed by ``prod``. - ``effectiveTagValueIds=tagValues/123`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagValue`` `__ - .\ ``name`` is exactly ``tagValues/123``. + ```TagValue.name`` `__ + is exactly ``tagValues/123``. - ``kmsKey:key`` to find Google Cloud resources encrypted with a customer-managed encryption key whose name contains ``key`` as a word. This field is - deprecated. Please use the ``kmsKeys`` field to - retrieve Cloud KMS key information. + deprecated. Use the ``kmsKeys`` field to retrieve + Cloud KMS key information. - ``kmsKeys:key`` to find Google Cloud resources encrypted with customer-managed encryption keys whose name contains the word ``key``. @@ -1546,6 +1544,12 @@ def sample_search_all_resources(): relationships with ``instance-group-1`` in the Compute Engine instance group resource name, for relationship type ``INSTANCE_TO_INSTANCEGROUP``. + - ``sccSecurityMarks.key=value`` to find Cloud + resources that are attached with security marks whose + key is ``key`` and value is ``value``. + - ``sccSecurityMarks.key:*`` to find Cloud resources + that are attached with security marks whose key is + ``key``. - ``state:ACTIVE`` to find Google Cloud resources whose state contains ``ACTIVE`` as a word. - ``NOT state:ACTIVE`` to find Google Cloud resources @@ -1579,7 +1583,7 @@ def sample_search_all_resources(): Optional. A list of asset types that this request searches for. 
If empty, it will search all the `searchable asset - types `__. + types `__. Regular expressions are also supported. For example: @@ -2972,13 +2976,17 @@ def sample_analyze_org_policies(): filter (str): The expression to filter [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - The only supported field is - ``consolidated_policy.attached_resource``, and the only - supported operator is ``=``. + Filtering is currently available for bare literal values + and the following fields: - Example: + - consolidated_policy.attached_resource + - consolidated_policy.rules.enforce + + When filtering by a specific field, the only supported + operator is ``=``. For example, filtering by consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return the org policy results of"folders/001". + will return all the Organization Policy results attached + to "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -3123,13 +3131,18 @@ def sample_analyze_org_policy_governed_containers(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - The expression to filter the governed containers in - result. The only supported field is ``parent``, and the - only supported operator is ``=``. + The expression to filter + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + Filtering is currently available for bare literal values + and the following fields: - Example: + - parent + - consolidated_policy.rules.enforce + + When filtering by a specific field, the only supported + operator is ``=``. For example, filtering by parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all containers under "folders/001". + will return all the containers under "folders/001". 
This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -3238,7 +3251,7 @@ def analyze_org_policy_governed_assets( This RPC only returns either resources of types supported by `searchable asset - types `__, + types `__, or IAM policies. .. code-block:: python @@ -3296,24 +3309,40 @@ def sample_analyze_org_policy_governed_assets(): on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - The expression to filter the governed assets in result. - The only supported fields for governed resources are - ``governed_resource.project`` and - ``governed_resource.folders``. The only supported fields - for governed iam policies are - ``governed_iam_policy.project`` and - ``governed_iam_policy.folders``. The only supported - operator is ``=``. - - Example 1: governed_resource.project="projects/12345678" - filter will return all governed resources under - projects/12345678 including the project ifself, if - applicable. - - Example 2: - governed_iam_policy.folders="folders/12345678" filter - will return all governed iam policies under - folders/12345678, if applicable. + The expression to filter + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. + + For governed resources, filtering is currently available + for bare literal values and the following fields: + + - governed_resource.project + - governed_resource.folders + - consolidated_policy.rules.enforce When filtering by + ``governed_resource.project`` or + ``consolidated_policy.rules.enforce``, the only + supported operator is ``=``. When filtering by + ``governed_resource.folders``, the supported + operators are ``=`` and ``:``. For example, filtering + by ``governed_resource.project="projects/12345678"`` + will return all the governed resources under + "projects/12345678", including the project itself if + applicable. 
+ + For governed IAM policies, filtering is currently + available for bare literal values and the following + fields: + + - governed_iam_policy.project + - governed_iam_policy.folders + - consolidated_policy.rules.enforce When filtering by + ``governed_iam_policy.project`` or + ``consolidated_policy.rules.enforce``, the only + supported operator is ``=``. When filtering by + ``governed_iam_policy.folders``, the supported + operators are ``=`` and ``:``. For example, filtering + by ``governed_iam_policy.folders:"folders/12345678"`` + will return all the governed IAM policies under + "folders/001". This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index fbd9de25e1ae..0a1aa65dad86 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -942,7 +942,7 @@ def analyze_org_policy_governed_assets( This RPC only returns either resources of types supported by `searchable asset - types `__, + types `__, or IAM policies. Returns: diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index d9da0888dd3d..d545361c821e 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -964,7 +964,7 @@ def analyze_org_policy_governed_assets( This RPC only returns either resources of types supported by `searchable asset - types `__, + types `__, or IAM policies. 
Returns: diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py index 8b52ff457c89..773a614cfb15 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py @@ -1114,37 +1114,35 @@ class SearchAllResourcesRequest(proto.Message): a label ``env``. - ``tagKeys:env`` to find Google Cloud resources that have directly attached tags where the - ```TagKey`` `__ - .\ ``namespacedName`` contains ``env``. + ```TagKey.namespacedName`` `__ + contains ``env``. - ``tagValues:prod*`` to find Google Cloud resources that have directly attached tags where the - ```TagValue`` `__ - .\ ``namespacedName`` contains a word prefixed by - ``prod``. + ```TagValue.namespacedName`` `__ + contains a word prefixed by ``prod``. - ``tagValueIds=tagValues/123`` to find Google Cloud resources that have directly attached tags where the - ```TagValue`` `__ - .\ ``name`` is exactly ``tagValues/123``. + ```TagValue.name`` `__ + is exactly ``tagValues/123``. - ``effectiveTagKeys:env`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagKey`` `__ - .\ ``namespacedName`` contains ``env``. + ```TagKey.namespacedName`` `__ + contains ``env``. - ``effectiveTagValues:prod*`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagValue`` `__ - .\ ``namespacedName`` contains a word prefixed by - ``prod``. + ```TagValue.namespacedName`` `__ + contains a word prefixed by ``prod``. - ``effectiveTagValueIds=tagValues/123`` to find Google Cloud resources that have directly attached or inherited tags where the - ```TagValue`` `__ - .\ ``name`` is exactly ``tagValues/123``. + ```TagValue.name`` `__ + is exactly ``tagValues/123``. 
- ``kmsKey:key`` to find Google Cloud resources encrypted with a customer-managed encryption key whose name - contains ``key`` as a word. This field is deprecated. - Please use the ``kmsKeys`` field to retrieve Cloud KMS - key information. + contains ``key`` as a word. This field is deprecated. Use + the ``kmsKeys`` field to retrieve Cloud KMS key + information. - ``kmsKeys:key`` to find Google Cloud resources encrypted with customer-managed encryption keys whose name contains the word ``key``. @@ -1159,6 +1157,11 @@ class SearchAllResourcesRequest(proto.Message): with ``instance-group-1`` in the Compute Engine instance group resource name, for relationship type ``INSTANCE_TO_INSTANCEGROUP``. + - ``sccSecurityMarks.key=value`` to find Cloud resources + that are attached with security marks whose key is + ``key`` and value is ``value``. + - ``sccSecurityMarks.key:*`` to find Cloud resources that + are attached with security marks whose key is ``key``. - ``state:ACTIVE`` to find Google Cloud resources whose state contains ``ACTIVE`` as a word. - ``NOT state:ACTIVE`` to find Google Cloud resources whose @@ -1183,7 +1186,7 @@ class SearchAllResourcesRequest(proto.Message): asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it will search all the `searchable asset - types `__. + types `__. Regular expressions are also supported. For example: @@ -1409,7 +1412,7 @@ class SearchAllIamPoliciesRequest(proto.Message): Optional. A list of asset types that the IAM policies are attached to. If empty, it will search the IAM policies that are attached to all the `searchable asset - types `__. + types `__. Regular expressions are also supported. For example: @@ -1803,8 +1806,8 @@ class AnalyzeIamPolicyRequest(proto.Message): If both ``analysis_query`` and ``saved_analysis_query`` are provided, they will be merged together with the ``saved_analysis_query`` as base and the ``analysis_query`` - as overrides. 
For more details of the merge behavior, please - refer to the + as overrides. For more details of the merge behavior, refer + to the `MergeFrom `__ page. @@ -2084,8 +2087,8 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): If both ``analysis_query`` and ``saved_analysis_query`` are provided, they will be merged together with the ``saved_analysis_query`` as base and the ``analysis_query`` - as overrides. For more details of the merge behavior, please - refer to the + as overrides. For more details of the merge behavior, refer + to the `MergeFrom `__ doc. @@ -2960,7 +2963,7 @@ class BatchGetEffectiveIamPoliciesRequest(proto.Message): Required. The names refer to the [full_resource_names] (https://cloud.google.com/asset-inventory/docs/resource-name-format) of `searchable asset - types `__. + types `__. A maximum of 20 resources' effective policies can be retrieved in a batch. """ @@ -3514,13 +3517,17 @@ class AnalyzeOrgPoliciesRequest(proto.Message): filter (str): The expression to filter [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - The only supported field is - ``consolidated_policy.attached_resource``, and the only - supported operator is ``=``. + Filtering is currently available for bare literal values and + the following fields: - Example: + - consolidated_policy.attached_resource + - consolidated_policy.rules.enforce + + When filtering by a specific field, the only supported + operator is ``=``. For example, filtering by consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return the org policy results of"folders/001". + will return all the Organization Policy results attached to + "folders/001". page_size (int): The maximum number of items to return per page. If unspecified, @@ -3647,13 +3654,18 @@ class AnalyzeOrgPolicyGovernedContainersRequest(proto.Message): only contains organization policies for the provided constraint. 
filter (str): - The expression to filter the governed containers in result. - The only supported field is ``parent``, and the only - supported operator is ``=``. + The expression to filter + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + Filtering is currently available for bare literal values and + the following fields: - Example: + - parent + - consolidated_policy.rules.enforce + + When filtering by a specific field, the only supported + operator is ``=``. For example, filtering by parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all containers under "folders/001". + will return all the containers under "folders/001". page_size (int): The maximum number of items to return per page. If unspecified, @@ -3797,22 +3809,38 @@ class AnalyzeOrgPolicyGovernedAssetsRequest(proto.Message): contains analyzed organization policies for the provided constraint. filter (str): - The expression to filter the governed assets in result. The - only supported fields for governed resources are - ``governed_resource.project`` and - ``governed_resource.folders``. The only supported fields for - governed iam policies are ``governed_iam_policy.project`` - and ``governed_iam_policy.folders``. The only supported - operator is ``=``. - - Example 1: governed_resource.project="projects/12345678" - filter will return all governed resources under - projects/12345678 including the project ifself, if - applicable. - - Example 2: governed_iam_policy.folders="folders/12345678" - filter will return all governed iam policies under - folders/12345678, if applicable. + The expression to filter + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. 
+ + For governed resources, filtering is currently available for + bare literal values and the following fields: + + - governed_resource.project + - governed_resource.folders + - consolidated_policy.rules.enforce When filtering by + ``governed_resource.project`` or + ``consolidated_policy.rules.enforce``, the only supported + operator is ``=``. When filtering by + ``governed_resource.folders``, the supported operators + are ``=`` and ``:``. For example, filtering by + ``governed_resource.project="projects/12345678"`` will + return all the governed resources under + "projects/12345678", including the project itself if + applicable. + + For governed IAM policies, filtering is currently available + for bare literal values and the following fields: + + - governed_iam_policy.project + - governed_iam_policy.folders + - consolidated_policy.rules.enforce When filtering by + ``governed_iam_policy.project`` or + ``consolidated_policy.rules.enforce``, the only supported + operator is ``=``. When filtering by + ``governed_iam_policy.folders``, the supported operators + are ``=`` and ``:``. For example, filtering by + ``governed_iam_policy.folders:"folders/12345678"`` will + return all the governed IAM policies under "folders/001". page_size (int): The maximum number of items to return per page. If unspecified, diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py index 2dfdab3435cf..4e0fd8c73780 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py @@ -202,17 +202,17 @@ class Asset(proto.Message): There can be more than one organization policy with different constraints set on a given resource. access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): - Please also refer to the `access policy user + Also refer to the `access policy user guide `__. 
This field is a member of `oneof`_ ``access_context_policy``. access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): - Please also refer to the `access level user + Also refer to the `access level user guide `__. This field is a member of `oneof`_ ``access_context_policy``. service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): - Please also refer to the `service perimeter user + Also refer to the `service perimeter user guide `__. This field is a member of `oneof`_ ``access_context_policy``. @@ -346,8 +346,6 @@ class Resource(proto.Message): hierarchy `__. Example: ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` - - For third-party assets, this field may be set differently. data (google.protobuf.struct_pb2.Struct): The content of the resource, in which some sensitive fields are removed and may not be @@ -714,10 +712,10 @@ class ResourceSearchResult(proto.Message): name. This field only presents for the purpose of backward - compatibility. Please use the ``kms_keys`` field to retrieve - Cloud KMS key information. This field is available only when - the resource's Protobuf contains it and will only be - populated for `these resource + compatibility. Use the ``kms_keys`` field to retrieve Cloud + KMS key information. This field is available only when the + resource's Protobuf contains it and will only be populated + for `these resource types `__ for backward compatible purposes. @@ -799,7 +797,7 @@ class ResourceSearchResult(proto.Message): provided by the corresponding Google Cloud service (e.g., Compute Engine). see `API references and supported searchable - attributes `__ + attributes `__ to see which fields are included. You can search values of these fields through free text @@ -849,7 +847,7 @@ class ResourceSearchResult(proto.Message): types `__. tag_keys (MutableSequence[str]): This field is only present for the purpose of backward - compatibility. 
Please use the ``tags`` field instead. + compatibility. Use the ``tags`` field instead. TagKey namespaced names, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}. To search against the @@ -866,7 +864,7 @@ class ResourceSearchResult(proto.Message): - ``env`` tag_values (MutableSequence[str]): This field is only present for the purpose of backward - compatibility. Please use the ``tags`` field instead. + compatibility. Use the ``tags`` field instead. TagValue namespaced names, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. To @@ -884,7 +882,7 @@ class ResourceSearchResult(proto.Message): - ``prod`` tag_value_ids (MutableSequence[str]): This field is only present for the purpose of backward - compatibility. Please use the ``tags`` field instead. + compatibility. Use the ``tags`` field instead. TagValue IDs, in the format of tagValues/{TAG_VALUE_ID}. To search against the ``tagValueIds``: @@ -948,12 +946,7 @@ class ResourceSearchResult(proto.Message): The actual content of Security Command Center security marks associated with the asset. - Note that both staging & prod SecurityMarks are attached on - prod resources. In CAS preprod/prod, both staging & prod - SecurityMarks are ingested and returned in the following - ``security_marks`` map. In that case, the prefix "staging." - will be added to the keys of all the staging marks. 
To - search against SCC SecurityMarks field: + To search against SCC SecurityMarks field: - Use a field query: @@ -1107,7 +1100,7 @@ class VersionedResource(proto.Message): You can find the resource definition for each supported resource type in this table: - ``https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types`` + ``https://cloud.google.com/asset-inventory/docs/supported-asset-types`` """ version: str = proto.Field( @@ -1134,7 +1127,7 @@ class AttachedResource(proto.Message): You can find the supported attached asset types of each resource in this table: - ``https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types`` + ``https://cloud.google.com/asset-inventory/docs/supported-asset-types`` versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): Versioned resource representations of this attached resource. This is repeated because diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 3d872171749b..d19a72a3bcfa 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/__init__.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/__init__.py index d910565287c2..068ee389c30d 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/__init__.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/__init__.py @@ -20,9 +20,11 @@ from .services.asset_service import AssetServiceAsyncClient, AssetServiceClient from .types.asset_service import ( + BatchGetAssetsHistoryResponse, ContentType, CreateFeedRequest, DeleteFeedRequest, + ExportAssetsResponse, Feed, FeedOutputConfig, GcsDestination, @@ -39,9 +41,11 @@ "AssetServiceAsyncClient", "Asset", "AssetServiceClient", + "BatchGetAssetsHistoryResponse", "ContentType", "CreateFeedRequest", "DeleteFeedRequest", + "ExportAssetsResponse", "Feed", "FeedOutputConfig", "GcsDestination", diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 3d872171749b..d19a72a3bcfa 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py index 501e3fd570dc..f5c887281a9f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py @@ -42,6 +42,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore + from google.cloud.asset_v1p2beta1.types import asset_service from .client import AssetServiceClient @@ -263,8 +265,8 @@ async def sample_create_feed(): be an organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID - (such as "projects/my-project-id")", or - a project number (such as + (such as "projects/my-project-id"), or a + project number (such as "projects/12345"). This corresponds to the ``parent`` field @@ -779,6 +781,60 @@ async def sample_delete_feed(): metadata=metadata, ) + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "AssetServiceAsyncClient": return self diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py index 635e361e52df..29f3e3947ce2 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py @@ -46,6 +46,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore + from google.cloud.asset_v1p2beta1.types import asset_service from .transports.base import DEFAULT_CLIENT_INFO, AssetServiceTransport @@ -489,8 +491,8 @@ def sample_create_feed(): be an organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID - (such as "projects/my-project-id")", or - a project number (such as + (such as "projects/my-project-id"), or a + project number (such as "projects/12345"). This corresponds to the ``parent`` field @@ -988,6 +990,60 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py index ed8800ead5d9..0d10e425debd 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py @@ -22,6 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -236,6 +237,15 @@ def delete_feed( ]: raise NotImplementedError() + @property + 
def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py index c6d452b2d603..46e6ae054963 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore @@ -363,6 +364,23 @@ def delete_feed( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py index 7402afe669b7..6fd052c8580d 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py @@ -19,6 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -370,5 +371,22 @@ def delete_feed( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ("AssetServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py index da55c15c7f11..d795859f2744 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py @@ -36,6 +36,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.cloud.asset_v1p2beta1.types import asset_service @@ -203,6 +204,29 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + @dataclasses.dataclass class AssetServiceRestStub: @@ -798,6 +822,72 @@ def update_feed( # In C++ this would require a dynamic_cast return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AssetServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1p2beta1/{name=*/*/operations/*/**}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/__init__.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/__init__.py index d243980fc029..7ac7d791ef65 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/__init__.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/__init__.py @@ -14,9 +14,11 @@ # limitations under the License. 
# from .asset_service import ( + BatchGetAssetsHistoryResponse, ContentType, CreateFeedRequest, DeleteFeedRequest, + ExportAssetsResponse, Feed, FeedOutputConfig, GcsDestination, @@ -30,8 +32,10 @@ from .assets import Asset, Resource, TemporalAsset, TimeWindow __all__ = ( + "BatchGetAssetsHistoryResponse", "CreateFeedRequest", "DeleteFeedRequest", + "ExportAssetsResponse", "Feed", "FeedOutputConfig", "GcsDestination", diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/asset_service.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/asset_service.py index c164986483c8..abcbbaa4c966 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/asset_service.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/asset_service.py @@ -18,12 +18,17 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.asset_v1p2beta1.types import assets as gca_assets + __protobuf__ = proto.module( package="google.cloud.asset.v1p2beta1", manifest={ "ContentType", + "ExportAssetsResponse", + "BatchGetAssetsHistoryResponse", "CreateFeedRequest", "GetFeedRequest", "ListFeedsRequest", @@ -55,6 +60,48 @@ class ContentType(proto.Enum): IAM_POLICY = 2 +class ExportAssetsResponse(proto.Message): + r"""The export asset response. This message is returned by the + [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] + method in the returned + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field. + + Attributes: + read_time (google.protobuf.timestamp_pb2.Timestamp): + Time the snapshot was taken. + output_config (google.cloud.asset_v1p2beta1.types.OutputConfig): + Output configuration indicating where the + results were output to. 
+ """ + + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + output_config: "OutputConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="OutputConfig", + ) + + +class BatchGetAssetsHistoryResponse(proto.Message): + r"""Batch get assets history response. + + Attributes: + assets (MutableSequence[google.cloud.asset_v1p2beta1.types.TemporalAsset]): + A list of assets with valid time windows. + """ + + assets: MutableSequence[gca_assets.TemporalAsset] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_assets.TemporalAsset, + ) + + class CreateFeedRequest(proto.Message): r"""Create asset feed request. @@ -66,7 +113,7 @@ class CreateFeedRequest(proto.Message): organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID (such as - "projects/my-project-id")", or a project number + "projects/my-project-id"), or a project number (such as "projects/12345"). feed_id (str): Required. This is the client-assigned asset @@ -218,7 +265,7 @@ class GcsDestination(proto.Message): Attributes: uri (str): - The uri of the Cloud Storage object. It's the same uri that + The URI of the Cloud Storage object. It's the same URI that is used by gsutil. For example: "gs://bucket_name/object_name". See `Viewing and Editing Object @@ -236,12 +283,12 @@ class GcsDestination(proto.Message): class PubsubDestination(proto.Message): - r"""A Cloud Pubsub destination. + r"""A Pub/Sub destination. Attributes: topic (str): - The name of the Cloud Pub/Sub topic to publish to. For - example: ``projects/PROJECT_ID/topics/TOPIC_ID``. + The name of the Pub/Sub topic to publish to. For example: + ``projects/PROJECT_ID/topics/TOPIC_ID``. """ topic: str = proto.Field( @@ -257,7 +304,7 @@ class FeedOutputConfig(proto.Message): Attributes: pubsub_destination (google.cloud.asset_v1p2beta1.types.PubsubDestination): - Destination on Cloud Pubsub. + Destination on Pub/Sub. 
This field is a member of `oneof`_ ``destination``. """ @@ -293,7 +340,7 @@ class Feed(proto.Message): A list of the full names of the assets to receive updates. You must specify either or both of asset_names and asset_types. Only asset updates matching specified - asset_names and asset_types are exported to the feed. For + asset_names or asset_types are exported to the feed. For example: ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. See `Resource @@ -302,7 +349,7 @@ class Feed(proto.Message): asset_types (MutableSequence[str]): A list of types of the assets to receive updates. You must specify either or both of asset_names and asset_types. Only - asset updates matching specified asset_names and asset_types + asset updates matching specified asset_names or asset_types are exported to the feed. For example: "compute.googleapis.com/Disk" See `Introduction to Cloud Asset diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/assets.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/assets.py index c8d2c7759c82..63be218e9354 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/assets.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/types/assets.py @@ -17,7 +17,13 @@ from typing import MutableMapping, MutableSequence +from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import access_level_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import access_policy_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import ( + service_perimeter_pb2, +) # type: ignore from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -34,18 +40,18 @@ class TemporalAsset(proto.Message): - r"""Temporal asset. 
In addition to the asset, the temporal asset - includes the status of the asset and valid from and to time of - it. + r"""An asset in Google Cloud and its temporal metadata, including + the time window when it was observed and its status during that + window. Attributes: window (google.cloud.asset_v1p2beta1.types.TimeWindow): The time window when the asset data and state was observed. deleted (bool): - If the asset is deleted or not. + Whether the asset has been deleted or not. asset (google.cloud.asset_v1p2beta1.types.Asset): - Asset. + An asset in Google Cloud. """ window: "TimeWindow" = proto.Field( @@ -65,14 +71,15 @@ class TemporalAsset(proto.Message): class TimeWindow(proto.Message): - r"""A time window of (start_time, end_time]. + r"""A time window specified by its ``start_time`` and ``end_time``. Attributes: start_time (google.protobuf.timestamp_pb2.Timestamp): Start time of the time window (exclusive). end_time (google.protobuf.timestamp_pb2.Timestamp): - End time of the time window (inclusive). - Current timestamp if not specified. + End time of the time window (inclusive). If + not specified, the current timestamp is used + instead. """ start_time: timestamp_pb2.Timestamp = proto.Field( @@ -88,35 +95,81 @@ class TimeWindow(proto.Message): class Asset(proto.Message): - r"""Cloud asset. This includes all Google Cloud Platform - resources, Cloud IAM policies, and other non-GCP assets. + r"""An asset in Google Cloud. An asset can be any resource in the Google + Cloud `resource + hierarchy `__, + a resource outside the Google Cloud resource hierarchy (such as + Google Kubernetes Engine clusters and objects), or a policy (e.g. + IAM policy). See `Supported asset + types `__ + for more information. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: name (str): - The full name of the asset. For example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. + The full name of the asset. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` + See `Resource - Names `__ + names `__ for more information. asset_type (str): - Type of the asset. Example: - "compute.googleapis.com/Disk". + The type of the asset. Example: + ``compute.googleapis.com/Disk`` + + See `Supported asset + types `__ + for more information. resource (google.cloud.asset_v1p2beta1.types.Resource): - Representation of the resource. + A representation of the resource. iam_policy (google.iam.v1.policy_pb2.Policy): - Representation of the actual Cloud IAM policy - set on a cloud resource. For each resource, - there must be at most one Cloud IAM policy set - on it. + A representation of the IAM policy set on a Google Cloud + resource. There can be a maximum of one IAM policy set on + any given resource. In addition, IAM policies inherit their + granted access scope from any policies set on parent + resources in the resource hierarchy. Therefore, the + effectively policy is the union of both the policy set on + this resource and each policy set on all of the resource's + ancestry resource levels in the hierarchy. See `this + topic `__ + for more information. ancestors (MutableSequence[str]): - Asset's ancestry path in Cloud Resource Manager (CRM) - hierarchy, represented as a list of relative resource names. - Ancestry path starts with the closest CRM ancestor and ends - at root. If the asset is a CRM project/folder/organization, - this starts from the asset itself. - - Example: ["projects/123456789", "folders/5432", - "organizations/1234"] + The ancestry path of an asset in Google Cloud `resource + hierarchy `__, + represented as a list of relative resource names. 
An + ancestry path starts with the closest ancestor in the + hierarchy and ends at root. If the asset is a project, + folder, or organization, the ancestry path starts from the + asset itself. + + Example: + ``["projects/123456789", "folders/5432", "organizations/1234"]`` + access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): + Please also refer to the `access policy user + guide `__. + + This field is a member of `oneof`_ ``access_context_policy``. + access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): + Please also refer to the `access level user + guide `__. + + This field is a member of `oneof`_ ``access_context_policy``. + service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): + Please also refer to the `service perimeter user + guide `__. + + This field is a member of `oneof`_ ``access_context_policy``. + org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): + A representation of an `organization + policy `__. + There can be more than one organization policy with + different constraints set on a given resource. 
""" name: str = proto.Field( @@ -141,49 +194,73 @@ class Asset(proto.Message): proto.STRING, number=6, ) + access_policy: access_policy_pb2.AccessPolicy = proto.Field( + proto.MESSAGE, + number=7, + oneof="access_context_policy", + message=access_policy_pb2.AccessPolicy, + ) + access_level: access_level_pb2.AccessLevel = proto.Field( + proto.MESSAGE, + number=8, + oneof="access_context_policy", + message=access_level_pb2.AccessLevel, + ) + service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( + proto.MESSAGE, + number=9, + oneof="access_context_policy", + message=service_perimeter_pb2.ServicePerimeter, + ) + org_policy: MutableSequence[orgpolicy_pb2.Policy] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=orgpolicy_pb2.Policy, + ) class Resource(proto.Message): - r"""Representation of a cloud resource. + r"""A representation of a Google Cloud resource. Attributes: version (str): - The API version. Example: "v1". + The API version. Example: ``v1`` discovery_document_uri (str): The URL of the discovery document containing the resource's - JSON schema. For example: - ``"https://www.googleapis.com/discovery/v1/apis/compute/v1/rest"``. - It will be left unspecified for resources without a - discovery-based API, such as Cloud Bigtable. + JSON schema. Example: + ``https://www.googleapis.com/discovery/v1/apis/compute/v1/rest`` + + This value is unspecified for resources that do not have an + API based on a discovery document, such as Cloud Bigtable. discovery_name (str): - The JSON schema name listed in the discovery - document. Example: "Project". It will be left - unspecified for resources (such as Cloud - Bigtable) without a discovery-based API. + The JSON schema name listed in the discovery document. + Example: ``Project`` + + This value is unspecified for resources that do not have an + API based on a discovery document, such as Cloud Bigtable. resource_url (str): - The REST URL for accessing the resource. 
An HTTP GET - operation using this URL returns the resource itself. - Example: - ``https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123``. - It will be left unspecified for resources without a REST - API. + The REST URL for accessing the resource. An HTTP ``GET`` + request using this URL returns the resource itself. Example: + ``https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`` + + This value is unspecified for resources without a REST API. parent (str): The full name of the immediate parent of this resource. See `Resource Names `__ for more information. - For GCP assets, it is the parent resource defined in the - `Cloud IAM policy + For Google Cloud assets, this value is the parent resource + defined in the `IAM policy hierarchy `__. - For example: - ``"//cloudresourcemanager.googleapis.com/projects/my_project_123"``. + Example: + ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` - For third-party assets, it is up to the users to define. + For third-party assets, this field may be set differently. data (google.protobuf.struct_pb2.Struct): The content of the resource, in which some - sensitive fields are scrubbed away and may not - be present. + sensitive fields are removed and may not be + present. """ version: str = proto.Field( diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index 6d446aad24eb..c5c0a1e00a99 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 3d872171749b..d19a72a3bcfa 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.21.0" # {x-release-please-version} +__version__ = "3.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 49e86ca067b8..e8889a837f93 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.21.0" + "version": "3.23.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index 9b56624814df..7e5cf31d7c96 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.21.0" + "version": "3.23.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index d8c9e9f32cad..4318a61b1761 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.21.0" + "version": "3.23.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 681b6b289f54..ea74eaaebb3b 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.21.0" + "version": "3.23.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py index d36a3f340f11..2c827659c511 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py @@ -32,6 +32,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format @@ -3339,6 +3340,7 
@@ def test_asset_service_base_transport(): "list_feeds", "update_feed", "delete_feed", + "get_operation", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3892,6 +3894,209 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "sample1/sample2/operations/sample3/sample4"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "sample1/sample2/operations/sample3/sample4"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index 4b186fc56016..682401bb4cb8 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,39 @@ # Changelog +## [0.17.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.8...google-cloud-batch-v0.17.9) (2024-01-24) + + +### Features + +* Add `run_as_non_root` field to allow user's runnable be executed as non root ([7d78274](https://github.com/googleapis/google-cloud-python/commit/7d78274ac9fb2f535e222c538d7908d8705a3314)) +* Add `tags` field in Job's AllocationPolicy field in v1 ([7d78274](https://github.com/googleapis/google-cloud-python/commit/7d78274ac9fb2f535e222c538d7908d8705a3314)) +* Add Batch Image Streaming support for v1 ([7d78274](https://github.com/googleapis/google-cloud-python/commit/7d78274ac9fb2f535e222c538d7908d8705a3314)) + + +### Documentation + +* [google-cloud-batch] Polish the field descriptions for enableImageStreaming and CloudLoggingOptions ([#12216](https://github.com/googleapis/google-cloud-python/issues/12216)) ([d23ec54](https://github.com/googleapis/google-cloud-python/commit/d23ec544504af029ac9530cc5cb435eb0f02e384)) + +## [0.17.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.7...google-cloud-batch-v0.17.8) (2024-01-22) + + +### Bug Fixes + +* **v1alpha:** [google-cloud-batch] remove deprecated field enableOslogin ([#12210](https://github.com/googleapis/google-cloud-python/issues/12210)) ([527862b](https://github.com/googleapis/google-cloud-python/commit/527862b9f38f9ef47b33584912d18aed191aaa6a)) + +## 
[0.17.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.6...google-cloud-batch-v0.17.7) (2024-01-12) + + +### Features + +* Add `run_as_non_root field` and deprecate `enable_oslogin` for non-root execution ([ce7ddbf](https://github.com/googleapis/google-cloud-python/commit/ce7ddbfdb90ad6e1eb46a79ce3e12276fbfa00ba)) +* Add `tags` field in Job's AllocationPolicy field in v1alpha ([ce7ddbf](https://github.com/googleapis/google-cloud-python/commit/ce7ddbfdb90ad6e1eb46a79ce3e12276fbfa00ba)) + + +### Documentation + +* updated comments ([ce7ddbf](https://github.com/googleapis/google-cloud-python/commit/ce7ddbfdb90ad6e1eb46a79ce3e12276fbfa00ba)) + ## [0.17.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.5...google-cloud-batch-v0.17.6) (2023-12-07) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 9d38e8c7ee16..9c4284842cb3 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.6" # {x-release-please-version} +__version__ = "0.17.9" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 9d38e8c7ee16..9c4284842cb3 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.6" # {x-release-please-version} +__version__ = "0.17.9" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 7d69548a7516..97a3cc49ef68 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -150,8 +150,8 @@ class LogsPolicy(proto.Message): path. cloud_logging_option (google.cloud.batch_v1.types.LogsPolicy.CloudLoggingOption): Optional. Additional settings for Cloud Logging. It will - only take effect when the destination of LogsPolicy is set - to CLOUD_LOGGING. + only take effect when the destination of ``LogsPolicy`` is + set to ``CLOUD_LOGGING``. """ class Destination(proto.Enum): @@ -170,11 +170,26 @@ class Destination(proto.Enum): PATH = 2 class CloudLoggingOption(proto.Message): - r"""CloudLoggingOption contains additional settings for cloud - logging generated by Batch job. + r"""``CloudLoggingOption`` contains additional settings for Cloud + Logging logs generated by Batch job. + Attributes: + use_generic_task_monitored_resource (bool): + Optional. Set this flag to true to change the `monitored + resource + type `__ + for Cloud Logging logs generated by this Batch job from the + ```batch.googleapis.com/Job`` `__ + type to the formerly used + ```generic_task`` `__ + type. """ + use_generic_task_monitored_resource: bool = proto.Field( + proto.BOOL, + number=1, + ) + destination: Destination = proto.Field( proto.ENUM, number=1, @@ -422,11 +437,17 @@ class AllocationPolicy(proto.Message): The network policy. If you define an instance template in the - InstancePolicyOrTemplate field, Batch will use - the network settings in the instance template - instead of this field. + ``InstancePolicyOrTemplate`` field, Batch will use the + network settings in the instance template instead of this + field. 
placement (google.cloud.batch_v1.types.AllocationPolicy.PlacementPolicy): The placement policy. + tags (MutableSequence[str]): + Optional. Tags applied to the VM instances. + + The tags identify valid sources or targets for network + firewalls. Each tag must be 1-63 characters long, and comply + with `RFC1035 `__. """ class ProvisioningModel(proto.Enum): @@ -906,6 +927,10 @@ class PlacementPolicy(proto.Message): number=10, message=PlacementPolicy, ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) class TaskGroup(proto.Message): @@ -961,6 +986,14 @@ class TaskGroup(proto.Message): When true, Batch will configure SSH to allow passwordless login between VMs running the Batch tasks in the same TaskGroup. + run_as_non_root (bool): + Optional. If not set or set to false, Batch + will use root user to execute runnables. If set + to true, Batch will make sure to run the + runnables using non-root user. Currently, the + non-root user Batch used is generated by OS + login. Reference: + https://cloud.google.com/compute/docs/oslogin """ class SchedulingPolicy(proto.Enum): @@ -1022,6 +1055,10 @@ class SchedulingPolicy(proto.Enum): proto.BOOL, number=12, ) + run_as_non_root: bool = proto.Field( + proto.BOOL, + number=14, + ) class ServiceAccount(proto.Message): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index e96bedee5632..0c8dee62edfa 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -325,6 +325,27 @@ class Container(proto.Message): Optional password for logging in to a docker registry. If password matches ``projects/*/secrets/*/versions/*`` then Batch will read the password from the Secret Manager; + enable_image_streaming (bool): + Optional. If set to true, this container runnable uses Image + streaming. 
+ + Use Image streaming to allow the runnable to initialize + without waiting for the entire container image to download, + which can significantly reduce startup time for large + container images. + + When ``enableImageStreaming`` is set to true, the container + runtime is `containerd `__ instead + of Docker. Additionally, this container runnable only + supports the following ``container`` subfields: + ``imageUri``, ``commands[]``, ``entrypoint``, and + ``volumes[]``; any other ``container`` subfields are + ignored. + + For more information about the requirements and limitations + for using Image streaming with Batch, see the + ```image-streaming`` sample on + GitHub `__. """ image_uri: str = proto.Field( @@ -359,6 +380,10 @@ class Container(proto.Message): proto.STRING, number=11, ) + enable_image_streaming: bool = proto.Field( + proto.BOOL, + number=12, + ) class Script(proto.Message): r"""Script runnable. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 9d38e8c7ee16..9c4284842cb3 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.6" # {x-release-please-version} +__version__ = "0.17.9" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index e20fdf7e0600..edb935159de3 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -190,8 +190,8 @@ class LogsPolicy(proto.Message): path. 
cloud_logging_option (google.cloud.batch_v1alpha.types.LogsPolicy.CloudLoggingOption): Optional. Additional settings for Cloud Logging. It will - only take effect when the destination of LogsPolicy is set - to CLOUD_LOGGING. + only take effect when the destination of ``LogsPolicy`` is + set to ``CLOUD_LOGGING``. """ class Destination(proto.Enum): @@ -210,13 +210,19 @@ class Destination(proto.Enum): PATH = 2 class CloudLoggingOption(proto.Message): - r"""CloudLoggingOption contains additional settings for cloud - logging generated by Batch job. + r"""``CloudLoggingOption`` contains additional settings for Cloud + Logging logs generated by Batch job. Attributes: use_generic_task_monitored_resource (bool): - Optional. Set this flag to true to use generic_task as - monitored resource for Batch job generated cloud logging. + Optional. Set this flag to true to change the `monitored + resource + type `__ + for Cloud Logging logs generated by this Batch job from the + ```batch.googleapis.com/Job`` `__ + type to the formerly used + ```generic_task`` `__ + type. """ use_generic_task_monitored_resource: bool = proto.Field( @@ -543,11 +549,17 @@ class AllocationPolicy(proto.Message): The network policy. If you define an instance template in the - InstancePolicyOrTemplate field, Batch will use - the network settings in the instance template - instead of this field. + ``InstancePolicyOrTemplate`` field, Batch will use the + network settings in the instance template instead of this + field. placement (google.cloud.batch_v1alpha.types.AllocationPolicy.PlacementPolicy): The placement policy. + tags (MutableSequence[str]): + Optional. Tags applied to the VM instances. + + The tags identify valid sources or targets for network + firewalls. Each tag must be 1-63 characters long, and comply + with `RFC1035 `__. 
""" class ProvisioningModel(proto.Enum): @@ -1059,6 +1071,10 @@ class PlacementPolicy(proto.Message): number=10, message=PlacementPolicy, ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) class TaskGroup(proto.Message): @@ -1125,19 +1141,14 @@ class TaskGroup(proto.Message): When true, Batch will configure SSH to allow passwordless login between VMs running the Batch tasks in the same TaskGroup. - enable_oslogin (bool): - Optional. When true, Batch will use the OS Login generated - POSIX account to exeucute the runnables instead of the - default root user. - - | To control root or non-root privilege for runnable - execution, the project - | Admin user needs to configure IAM roles according to - https://cloud.google.com/compute/docs/oslogin/set-up-oslogin#configure_users. - Specifically, if a root execution is needed, the - roles/compute.osAdminLogin should be granted to the Batch - job submitter. Otherwise, roles/compute.osLogin should be - granted to the Batch job submitter. + run_as_non_root (bool): + Optional. If not set or set to false, Batch + will use root user to execute runnables. If set + to true, Batch will make sure to run the + runnables using non-root user. Currently, the + non-root user Batch used is generated by OS + login. 
Reference: + https://cloud.google.com/compute/docs/oslogin """ class SchedulingPolicy(proto.Enum): @@ -1209,9 +1220,9 @@ class SchedulingPolicy(proto.Enum): proto.BOOL, number=12, ) - enable_oslogin: bool = proto.Field( + run_as_non_root: bool = proto.Field( proto.BOOL, - number=13, + number=14, ) diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index 048ae55cf83e..f68a8c8e9e35 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -366,12 +366,26 @@ class Container(proto.Message): password matches ``projects/*/secrets/*/versions/*`` then Batch will read the password from the Secret Manager; enable_image_streaming (bool): - Optional. Not yet implemented. - If set to true, container will run with Image - streaming. The container runtime will be changed - to containerd instead of docker. Currently, only - imageUri, commands, entrypoint and volumes are - supported and any other fields will be ignored. + Optional. If set to true, this container runnable uses Image + streaming. + + Use Image streaming to allow the runnable to initialize + without waiting for the entire container image to download, + which can significantly reduce startup time for large + container images. + + When ``enableImageStreaming`` is set to true, the container + runtime is `containerd `__ instead + of Docker. Additionally, this container runnable only + supports the following ``container`` subfields: + ``imageUri``, ``commands[]``, ``entrypoint``, and + ``volumes[]``; any other ``container`` subfields are + ignored. + + For more information about the requirements and limitations + for using Image streaming with Batch, see the + ```image-streaming`` sample on + GitHub `__. 
""" image_uri: str = proto.Field( diff --git a/packages/google-cloud-batch/noxfile.py b/packages/google-cloud-batch/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-batch/noxfile.py +++ b/packages/google-cloud-batch/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index f7d0f46538b1..7d48f64c2ce2 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.6" + "version": "0.17.9" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 860ba626f131..0d99dacbb551 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.6" + "version": "0.17.9" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 90ea6a446060..8b4ad9baffda 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -2538,6 +2538,7 @@ def test_create_job_rest(request_type): "block_external_network": True, "username": "username_value", "password": "password_value", + "enable_image_streaming": True, 
}, "script": {"path": "path_value", "text": "text_value"}, "barrier": {"name": "name_value"}, @@ -2592,6 +2593,7 @@ def test_create_job_rest(request_type): "task_count_per_node": 2022, "require_hosts_file": True, "permissive_ssh": True, + "run_as_non_root": True, } ], "allocation_policy": { @@ -2650,6 +2652,7 @@ def test_create_job_rest(request_type): ] }, "placement": {"collocation": "collocation_value", "max_distance": 1264}, + "tags": ["tags_value1", "tags_value2"], }, "labels": {}, "status": { @@ -2671,7 +2674,7 @@ def test_create_job_rest(request_type): "logs_policy": { "destination": 1, "logs_path": "logs_path_value", - "cloud_logging_option": {}, + "cloud_logging_option": {"use_generic_task_monitored_resource": True}, }, "notifications": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index b55bc843d733..5b02fb8e4273 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -2681,13 +2681,14 @@ def test_create_job_rest(request_type): "collocation": "collocation_value", "max_distance": 1264, }, + "tags": ["tags_value1", "tags_value2"], }, "labels": {}, "task_environments": {}, "task_count_per_node": 2022, "require_hosts_file": True, "permissive_ssh": True, - "enable_oslogin": True, + "run_as_non_root": True, } ], "scheduling_policy": 1, diff --git a/packages/google-cloud-billing/CHANGELOG.md b/packages/google-cloud-billing/CHANGELOG.md index d3c70bdf0d07..ef2db3235344 100644 --- a/packages/google-cloud-billing/CHANGELOG.md +++ b/packages/google-cloud-billing/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.12.0...google-cloud-billing-v1.12.1) (2024-01-19) + + +### Documentation + +* [google-cloud-billing] update 
comments ([#12202](https://github.com/googleapis/google-cloud-python/issues/12202)) ([9acf675](https://github.com/googleapis/google-cloud-python/commit/9acf675503176395452d5d5bd464fb20757f2ab8)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.11.5...google-cloud-billing-v1.12.0) (2023-12-07) diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index 18a7732658ee..ac3bc60c3fb3 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index 18a7732658ee..ac3bc60c3fb3 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py b/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py index 028d9dec4c70..e56655c36ce6 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py @@ -56,8 +56,8 @@ class BillingAccount(proto.Message): open, and will therefore be charged for any usage on associated projects. 
False if the billing account is closed, and therefore - projects associated with it will be unable to - use paid services. + projects associated with it are unable to use + paid services. display_name (str): The display name given to the billing account, such as ``My Billing Account``. This name is displayed in the Google @@ -422,8 +422,8 @@ class MoveBillingAccountRequest(proto.Message): always belongs to the same organization as its parent account. destination_parent (str): - Required. The resource name of the Organization to reparent - the billing account under. Must be of the form + Required. The resource name of the Organization to move the + billing account under. Must be of the form ``organizations/{organization_id}``. """ diff --git a/packages/google-cloud-billing/noxfile.py b/packages/google-cloud-billing/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-billing/noxfile.py +++ b/packages/google-cloud-billing/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index 59ad456a11a5..a5f6e0d8e02f 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "1.12.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-channel/CHANGELOG.md b/packages/google-cloud-channel/CHANGELOG.md index 37525ec3a370..68f8a0536805 100644 --- a/packages/google-cloud-channel/CHANGELOG.md +++ b/packages/google-cloud-channel/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [1.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.17.0...google-cloud-channel-v1.17.1) (2024-01-04) + + +### Documentation + +* Add deprecation comment for method `FetchReportResults` in service `CloudChannelReportsService` ([1e6bf49](https://github.com/googleapis/google-cloud-python/commit/1e6bf49abdd6cc27af391acab15b4ca089111849)) +* Add deprecation comment for method `ListReports` in service `CloudChannelReportsService` ([1e6bf49](https://github.com/googleapis/google-cloud-python/commit/1e6bf49abdd6cc27af391acab15b4ca089111849)) +* Add deprecation comment for method `RunReportJob` in service `CloudChannelReportsService` ([1e6bf49](https://github.com/googleapis/google-cloud-python/commit/1e6bf49abdd6cc27af391acab15b4ca089111849)) +* Add deprecation comment for service `CloudChannelReportsService` 
([1e6bf49](https://github.com/googleapis/google-cloud-python/commit/1e6bf49abdd6cc27af391acab15b4ca089111849)) + ## [1.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.16.0...google-cloud-channel-v1.17.0) (2023-12-07) diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index 3293978284a9..21446a04d2fc 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index 3293978284a9..21446a04d2fc 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py index 670820e699ca..cbc4b23a3ed9 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py @@ -27,6 +27,7 @@ Type, Union, ) +import warnings from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -56,9 +57,14 @@ class CloudChannelReportsServiceAsyncClient: """CloudChannelReportsService lets Google Cloud resellers and - distributors retrieve and combine a variety of data in Cloud - Channel for multiple products (Google Cloud, Google Voice, and - Google Workspace.) + distributors retrieve and combine a variety of data in Cloud Channel + for multiple products (Google Cloud, Google Voice, and Google + Workspace.) + + Deprecated: This service is being deprecated. Please use `Export + Channel Services data to + BigQuery `__ + instead. """ _client: CloudChannelReportsServiceClient @@ -268,6 +274,10 @@ async def run_report_job( with the [RunReportJobResponse.report_job][google.cloud.channel.v1.RunReportJobResponse.report_job]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -316,6 +326,11 @@ async def sample_run_report_job(): [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob]. 
""" + warnings.warn( + "CloudChannelReportsServiceAsyncClient.run_report_job is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. request = reports_service.RunReportJobRequest(request) @@ -366,6 +381,10 @@ async def fetch_report_results( r"""Retrieves data generated by [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -423,6 +442,11 @@ async def sample_fetch_report_results(): resolve additional pages automatically. """ + warnings.warn( + "CloudChannelReportsServiceAsyncClient.fetch_report_results is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -485,9 +509,13 @@ async def list_reports( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListReportsAsyncPager: - r"""Lists the reports that RunReportJob can run. These - reports include an ID, a description, and the list of - columns that will be in the result. + r"""Lists the reports that RunReportJob can run. These reports + include an ID, a description, and the list of columns that will + be in the result. + + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. .. code-block:: python @@ -543,6 +571,11 @@ async def sample_list_reports(): resolve additional pages automatically. """ + warnings.warn( + "CloudChannelReportsServiceAsyncClient.list_reports is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py index 8ca64c07efa0..fc82ca9df589 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -96,9 +97,14 @@ def get_transport_class( class CloudChannelReportsServiceClient(metaclass=CloudChannelReportsServiceClientMeta): """CloudChannelReportsService lets Google Cloud resellers and - distributors retrieve and combine a variety of data in Cloud - Channel for multiple products (Google Cloud, Google Voice, and - Google Workspace.) + distributors retrieve and combine a variety of data in Cloud Channel + for multiple products (Google Cloud, Google Voice, and Google + Workspace.) + + Deprecated: This service is being deprecated. Please use `Export + Channel Services data to + BigQuery `__ + instead. """ @staticmethod @@ -498,6 +504,10 @@ def run_report_job( with the [RunReportJobResponse.report_job][google.cloud.channel.v1.RunReportJobResponse.report_job]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -546,6 +556,11 @@ def sample_run_report_job(): [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob]. """ + warnings.warn( + "CloudChannelReportsServiceClient.run_report_job is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. 
# Minor optimization to avoid making a copy if the user passes # in a reports_service.RunReportJobRequest. @@ -597,6 +612,10 @@ def fetch_report_results( r"""Retrieves data generated by [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -654,6 +673,11 @@ def sample_fetch_report_results(): resolve additional pages automatically. """ + warnings.warn( + "CloudChannelReportsServiceClient.fetch_report_results is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -716,9 +740,13 @@ def list_reports( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListReportsPager: - r"""Lists the reports that RunReportJob can run. These - reports include an ID, a description, and the list of - columns that will be in the result. + r"""Lists the reports that RunReportJob can run. These reports + include an ID, a description, and the list of columns that will + be in the result. + + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. .. code-block:: python @@ -774,6 +802,11 @@ def sample_list_reports(): resolve additional pages automatically. """ + warnings.warn( + "CloudChannelReportsServiceClient.list_reports is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc.py index 18f9903b2521..166d2c8c5a7b 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc.py @@ -32,9 +32,14 @@ class CloudChannelReportsServiceGrpcTransport(CloudChannelReportsServiceTranspor """gRPC backend transport for CloudChannelReportsService. CloudChannelReportsService lets Google Cloud resellers and - distributors retrieve and combine a variety of data in Cloud - Channel for multiple products (Google Cloud, Google Voice, and - Google Workspace.) + distributors retrieve and combine a variety of data in Cloud Channel + for multiple products (Google Cloud, Google Voice, and Google + Workspace.) + + Deprecated: This service is being deprecated. Please use `Export + Channel Services data to + BigQuery `__ + instead. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -280,6 +285,10 @@ def run_report_job( with the [RunReportJobResponse.report_job][google.cloud.channel.v1.RunReportJobResponse.report_job]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + Returns: Callable[[~.RunReportJobRequest], ~.Operation]: @@ -310,6 +319,10 @@ def fetch_report_results( Retrieves data generated by [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. 
+ Returns: Callable[[~.FetchReportResultsRequest], ~.FetchReportResultsResponse]: @@ -336,9 +349,13 @@ def list_reports( ]: r"""Return a callable for the list reports method over gRPC. - Lists the reports that RunReportJob can run. These - reports include an ID, a description, and the list of - columns that will be in the result. + Lists the reports that RunReportJob can run. These reports + include an ID, a description, and the list of columns that will + be in the result. + + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. Returns: Callable[[~.ListReportsRequest], diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc_asyncio.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc_asyncio.py index 2c6d9ba7cb16..214a31c28d10 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/transports/grpc_asyncio.py @@ -35,9 +35,14 @@ class CloudChannelReportsServiceGrpcAsyncIOTransport( """gRPC AsyncIO backend transport for CloudChannelReportsService. CloudChannelReportsService lets Google Cloud resellers and - distributors retrieve and combine a variety of data in Cloud - Channel for multiple products (Google Cloud, Google Voice, and - Google Workspace.) + distributors retrieve and combine a variety of data in Cloud Channel + for multiple products (Google Cloud, Google Voice, and Google + Workspace.) + + Deprecated: This service is being deprecated. Please use `Export + Channel Services data to + BigQuery `__ + instead. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -289,6 +294,10 @@ def run_report_job( with the [RunReportJobResponse.report_job][google.cloud.channel.v1.RunReportJobResponse.report_job]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + Returns: Callable[[~.RunReportJobRequest], Awaitable[~.Operation]]: @@ -319,6 +328,10 @@ def fetch_report_results( Retrieves data generated by [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob]. + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. + Returns: Callable[[~.FetchReportResultsRequest], Awaitable[~.FetchReportResultsResponse]]: @@ -346,9 +359,13 @@ def list_reports( ]: r"""Return a callable for the list reports method over gRPC. - Lists the reports that RunReportJob can run. These - reports include an ID, a description, and the list of - columns that will be in the result. + Lists the reports that RunReportJob can run. These reports + include an ID, a description, and the list of columns that will + be in the result. + + Deprecated: Please use `Export Channel Services data to + BigQuery `__ + instead. 
Returns: Callable[[~.ListReportsRequest], diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 844988d4a57d..b41f6ccd4be4 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "1.17.0" + "version": "1.17.1" }, "snippets": [ { diff --git a/packages/google-cloud-cloudquotas/.OwlBot.yaml b/packages/google-cloud-cloudquotas/.OwlBot.yaml new file mode 100644 index 000000000000..c51a395f7d8b --- /dev/null +++ b/packages/google-cloud-cloudquotas/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/api/cloudquotas/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-cloudquotas/$1 +api-name: google-cloud-cloudquotas diff --git a/packages/google-cloud-cloudquotas/.coveragerc b/packages/google-cloud-cloudquotas/.coveragerc new file mode 100644 index 000000000000..ed4665249a5f --- /dev/null +++ b/packages/google-cloud-cloudquotas/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/cloudquotas/__init__.py + google/cloud/cloudquotas/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-cloudquotas/.flake8 b/packages/google-cloud-cloudquotas/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-cloudquotas/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-cloudquotas/.gitignore b/packages/google-cloud-cloudquotas/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-cloudquotas/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-cloudquotas/.repo-metadata.json b/packages/google-cloud-cloudquotas/.repo-metadata.json new file mode 100644 index 000000000000..65ec36bcf232 --- /dev/null +++ b/packages/google-cloud-cloudquotas/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-cloudquotas", + "name_pretty": "Cloud Quotas API", + "api_description": "Cloud Quotas API provides Google Cloud service consumers with management and observability for resource usage, quotas, and restrictions of the services they consume.", + "product_documentation": "https://cloud.google.com/docs/quota/api-overview", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudquotas/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=445904", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-cloudquotas", + "api_id": 
"cloudquotas.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "cloudquotas" +} diff --git a/packages/google-cloud-cloudquotas/CHANGELOG.md b/packages/google-cloud-cloudquotas/CHANGELOG.md new file mode 100644 index 000000000000..cfb72cf89b34 --- /dev/null +++ b/packages/google-cloud-cloudquotas/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-01-12) + + +### Features + +* add initial files for google.api.cloudquotas.v1 ([#12193](https://github.com/googleapis/google-cloud-python/issues/12193)) ([797c302](https://github.com/googleapis/google-cloud-python/commit/797c302fcc475657959488a5db503a874d910c21)) + +## Changelog diff --git a/packages/google-cloud-cloudquotas/CODE_OF_CONDUCT.md b/packages/google-cloud-cloudquotas/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-cloudquotas/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-cloudquotas/CONTRIBUTING.rst b/packages/google-cloud-cloudquotas/CONTRIBUTING.rst new file mode 100644 index 000000000000..ee4d1f52e9d1 --- /dev/null +++ b/packages/google-cloud-cloudquotas/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-cloudquotas + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-cloudquotas/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-cloud-cloudquotas/LICENSE b/packages/google-cloud-cloudquotas/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-cloudquotas/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-cloudquotas/MANIFEST.in b/packages/google-cloud-cloudquotas/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-cloudquotas/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-cloudquotas/README.rst b/packages/google-cloud-cloudquotas/README.rst new file mode 100644 index 000000000000..e712f7368829 --- /dev/null +++ b/packages/google-cloud-cloudquotas/README.rst @@ -0,0 +1,110 @@ +Python Client for Cloud Quotas API +================================== + +|preview| |pypi| |versions| + +`Cloud Quotas API`_: Cloud Quotas API provides Google Cloud service consumers with management +and observability for resource usage, quotas, and restrictions of the +services they consume. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-cloudquotas.svg + :target: https://pypi.org/project/google-cloud-cloudquotas/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-cloudquotas.svg + :target: https://pypi.org/project/google-cloud-cloudquotas/ +.. _Cloud Quotas API: https://cloud.google.com/docs/quota/api-overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-cloudquotas/latest +.. _Product Documentation: https://cloud.google.com/docs/quota/api-overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud Quotas API.`_ +4. `Setup Authentication.`_ + +.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Quotas API.: https://cloud.google.com/docs/quota/api-overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-cloudquotas + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-cloudquotas + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud Quotas API + to see other available methods on the client. +- Read the `Cloud Quotas API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud Quotas API Product documentation: https://cloud.google.com/docs/quota/api-overview +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-cloudquotas/docs/CHANGELOG.md b/packages/google-cloud-cloudquotas/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-cloudquotas/docs/README.rst b/packages/google-cloud-cloudquotas/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-cloudquotas/docs/_static/custom.css b/packages/google-cloud-cloudquotas/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-cloudquotas/docs/_templates/layout.html b/packages/google-cloud-cloudquotas/docs/_templates/layout.html new 
file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/cloud_quotas.rst b/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/cloud_quotas.rst new file mode 100644 index 000000000000..eebe1aab60fb --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/cloud_quotas.rst @@ -0,0 +1,10 @@ +CloudQuotas +----------------------------- + +.. automodule:: google.cloud.cloudquotas_v1.services.cloud_quotas + :members: + :inherited-members: + +.. automodule:: google.cloud.cloudquotas_v1.services.cloud_quotas.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/services_.rst b/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/services_.rst new file mode 100644 index 000000000000..2ed9c6a7e724 --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Cloudquotas v1 API +============================================ +.. toctree:: + :maxdepth: 2 + + cloud_quotas diff --git a/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/types_.rst b/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/types_.rst new file mode 100644 index 000000000000..6b22302fe873 --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/cloudquotas_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Cloudquotas v1 API +========================================= + +.. automodule:: google.cloud.cloudquotas_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-cloudquotas/docs/conf.py b/packages/google-cloud-cloudquotas/docs/conf.py new file mode 100644 index 000000000000..5bff33820bf8 --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-cloudquotas documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-cloudquotas" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-cloudquotas", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". 
+# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-cloudquotas-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-cloudquotas.tex", + "google-cloud-cloudquotas Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-cloudquotas", + "google-cloud-cloudquotas Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-cloudquotas", + "google-cloud-cloudquotas Documentation", + author, + "google-cloud-cloudquotas", + "google-cloud-cloudquotas Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-cloudquotas/docs/index.rst b/packages/google-cloud-cloudquotas/docs/index.rst new file mode 100644 index 000000000000..2ccf318bc3a7 --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + cloudquotas_v1/services_ + cloudquotas_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-cloudquotas`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-cloudquotas/docs/multiprocessing.rst b/packages/google-cloud-cloudquotas/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-cloudquotas/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. 
note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/__init__.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/__init__.py new file mode 100644 index 000000000000..288a2624b56f --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.cloudquotas import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.cloudquotas_v1.services.cloud_quotas.async_client import ( + CloudQuotasAsyncClient, +) +from google.cloud.cloudquotas_v1.services.cloud_quotas.client import CloudQuotasClient +from google.cloud.cloudquotas_v1.types.cloudquotas import ( + CreateQuotaPreferenceRequest, + GetQuotaInfoRequest, + GetQuotaPreferenceRequest, + ListQuotaInfosRequest, + ListQuotaInfosResponse, + ListQuotaPreferencesRequest, + ListQuotaPreferencesResponse, + UpdateQuotaPreferenceRequest, +) +from google.cloud.cloudquotas_v1.types.resources import ( + DimensionsInfo, + QuotaConfig, + QuotaDetails, + QuotaIncreaseEligibility, + QuotaInfo, + QuotaPreference, + QuotaSafetyCheck, +) + +__all__ = ( + "CloudQuotasClient", + "CloudQuotasAsyncClient", + "CreateQuotaPreferenceRequest", + "GetQuotaInfoRequest", + "GetQuotaPreferenceRequest", + "ListQuotaInfosRequest", + "ListQuotaInfosResponse", + "ListQuotaPreferencesRequest", + "ListQuotaPreferencesResponse", + "UpdateQuotaPreferenceRequest", + "DimensionsInfo", + "QuotaConfig", + "QuotaDetails", + "QuotaIncreaseEligibility", + "QuotaInfo", + "QuotaPreference", + "QuotaSafetyCheck", +) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py new file mode 100644 index 000000000000..20d1d778beb7 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/py.typed b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/py.typed new file mode 100644 index 000000000000..13b6e7a7c797 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-quotas package uses inline types. diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/__init__.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/__init__.py new file mode 100644 index 000000000000..c8a5eb8c6967 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_quotas import CloudQuotasAsyncClient, CloudQuotasClient +from .types.cloudquotas import ( + CreateQuotaPreferenceRequest, + GetQuotaInfoRequest, + GetQuotaPreferenceRequest, + ListQuotaInfosRequest, + ListQuotaInfosResponse, + ListQuotaPreferencesRequest, + ListQuotaPreferencesResponse, + UpdateQuotaPreferenceRequest, +) +from .types.resources import ( + DimensionsInfo, + QuotaConfig, + QuotaDetails, + QuotaIncreaseEligibility, + QuotaInfo, + QuotaPreference, + QuotaSafetyCheck, +) + +__all__ = ( + "CloudQuotasAsyncClient", + "CloudQuotasClient", + "CreateQuotaPreferenceRequest", + "DimensionsInfo", + "GetQuotaInfoRequest", + "GetQuotaPreferenceRequest", + "ListQuotaInfosRequest", + "ListQuotaInfosResponse", + "ListQuotaPreferencesRequest", + "ListQuotaPreferencesResponse", + "QuotaConfig", + "QuotaDetails", + "QuotaIncreaseEligibility", + "QuotaInfo", + "QuotaPreference", + "QuotaSafetyCheck", + "UpdateQuotaPreferenceRequest", +) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_metadata.json b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_metadata.json new file mode 100644 index 000000000000..dde44e1b52f2 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_metadata.json @@ -0,0 +1,118 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.cloudquotas_v1", + "protoPackage": "google.api.cloudquotas.v1", + "schema": "1.0", + "services": { + "CloudQuotas": { + "clients": { + "grpc": { + "libraryClient": "CloudQuotasClient", + "rpcs": { + "CreateQuotaPreference": { + "methods": [ + "create_quota_preference" + ] + }, + "GetQuotaInfo": { + "methods": [ + "get_quota_info" + ] + }, + "GetQuotaPreference": { + "methods": [ + 
"get_quota_preference" + ] + }, + "ListQuotaInfos": { + "methods": [ + "list_quota_infos" + ] + }, + "ListQuotaPreferences": { + "methods": [ + "list_quota_preferences" + ] + }, + "UpdateQuotaPreference": { + "methods": [ + "update_quota_preference" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudQuotasAsyncClient", + "rpcs": { + "CreateQuotaPreference": { + "methods": [ + "create_quota_preference" + ] + }, + "GetQuotaInfo": { + "methods": [ + "get_quota_info" + ] + }, + "GetQuotaPreference": { + "methods": [ + "get_quota_preference" + ] + }, + "ListQuotaInfos": { + "methods": [ + "list_quota_infos" + ] + }, + "ListQuotaPreferences": { + "methods": [ + "list_quota_preferences" + ] + }, + "UpdateQuotaPreference": { + "methods": [ + "update_quota_preference" + ] + } + } + }, + "rest": { + "libraryClient": "CloudQuotasClient", + "rpcs": { + "CreateQuotaPreference": { + "methods": [ + "create_quota_preference" + ] + }, + "GetQuotaInfo": { + "methods": [ + "get_quota_info" + ] + }, + "GetQuotaPreference": { + "methods": [ + "get_quota_preference" + ] + }, + "ListQuotaInfos": { + "methods": [ + "list_quota_infos" + ] + }, + "ListQuotaPreferences": { + "methods": [ + "list_quota_preferences" + ] + }, + "UpdateQuotaPreference": { + "methods": [ + "update_quota_preference" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py new file mode 100644 index 000000000000..20d1d778beb7 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/gapic_version.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/py.typed b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/py.typed new file mode 100644 index 000000000000..13b6e7a7c797 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-quotas package uses inline types. diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/__init__.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py new file mode 100644 index 000000000000..98ab9db9f1c6 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CloudQuotasAsyncClient +from .client import CloudQuotasClient + +__all__ = ( + "CloudQuotasClient", + "CloudQuotasAsyncClient", +) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py new file mode 100644 index 000000000000..454be1192a48 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/async_client.py @@ -0,0 +1,1018 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.cloudquotas_v1.services.cloud_quotas import pagers +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + +from .client import CloudQuotasClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudQuotasTransport +from .transports.grpc_asyncio import CloudQuotasGrpcAsyncIOTransport + + +class CloudQuotasAsyncClient: + """The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. 
+ - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + """ + + _client: CloudQuotasClient + + DEFAULT_ENDPOINT = CloudQuotasClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudQuotasClient.DEFAULT_MTLS_ENDPOINT + + quota_info_path = staticmethod(CloudQuotasClient.quota_info_path) + parse_quota_info_path = staticmethod(CloudQuotasClient.parse_quota_info_path) + quota_preference_path = staticmethod(CloudQuotasClient.quota_preference_path) + parse_quota_preference_path = staticmethod( + CloudQuotasClient.parse_quota_preference_path + ) + common_billing_account_path = staticmethod( + CloudQuotasClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudQuotasClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CloudQuotasClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudQuotasClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudQuotasClient.common_organization_path) + parse_common_organization_path = staticmethod( + CloudQuotasClient.parse_common_organization_path + ) + common_project_path = staticmethod(CloudQuotasClient.common_project_path) + parse_common_project_path = staticmethod( + CloudQuotasClient.parse_common_project_path + ) + common_location_path = staticmethod(CloudQuotasClient.common_location_path) + parse_common_location_path = staticmethod( + CloudQuotasClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasAsyncClient: The constructed client. 
+ """ + return CloudQuotasClient.from_service_account_info.__func__(CloudQuotasAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasAsyncClient: The constructed client. + """ + return CloudQuotasClient.from_service_account_file.__func__(CloudQuotasAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudQuotasClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudQuotasTransport: + """Returns the transport used by the client instance. + + Returns: + CloudQuotasTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudQuotasClient).get_transport_class, type(CloudQuotasClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudQuotasTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud quotas client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudQuotasTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudQuotasClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_quota_infos( + self, + request: Optional[Union[cloudquotas.ListQuotaInfosRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaInfosAsyncPager: + r"""Lists QuotaInfos of all quotas for a given project, + folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest, dict]]): + The request object. Message for requesting list of + QuotaInfos + parent (:class:`str`): + Required. Parent value of QuotaInfo resources. Listing + across different resource containers (such as + 'projects/-') is not allowed. + + Example names: + ``projects/123/locations/global/services/compute.googleapis.com`` + ``folders/234/locations/global/services/compute.googleapis.com`` + ``organizations/345/locations/global/services/compute.googleapis.com`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosAsyncPager: + Message for response to listing + QuotaInfos + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudquotas.ListQuotaInfosRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_quota_infos, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQuotaInfosAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_quota_info( + self, + request: Optional[Union[cloudquotas.GetQuotaInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaInfo: + r"""Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest, dict]]): + The request object. Message for getting a QuotaInfo + name (:class:`str`): + Required. The resource name of the quota info. + + An example name: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaInfo: + QuotaInfo represents information + about a particular quota for a given + project, folder or organization. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudquotas.GetQuotaInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_quota_info, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_quota_preferences( + self, + request: Optional[Union[cloudquotas.ListQuotaPreferencesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaPreferencesAsyncPager: + r"""Lists QuotaPreferences in a given project, folder or + organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest, dict]]): + The request object. Message for requesting list of + QuotaPreferences + parent (:class:`str`): + Required. Parent value of QuotaPreference resources. + Listing across different resource containers (such as + 'projects/-') is not allowed. + + When the value starts with 'folders' or 'organizations', + it lists the QuotaPreferences for org quotas in the + container. It does not list the QuotaPreferences in the + descendant projects of the container. + + Example parents: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesAsyncPager: + Message for response to listing + QuotaPreferences + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudquotas.ListQuotaPreferencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_quota_preferences, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQuotaPreferencesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_quota_preference( + self, + request: Optional[Union[cloudquotas.GetQuotaPreferenceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Gets details of a single QuotaPreference. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest, dict]]): + The request object. Message for getting a QuotaPreference + name (:class:`str`): + Required. Name of the resource + + Example name: + ``projects/123/locations/global/quota_preferences/my-config-for-us-east1`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudquotas.GetQuotaPreferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_quota_preference, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_quota_preference( + self, + request: Optional[Union[cloudquotas.CreateQuotaPreferenceRequest, dict]] = None, + *, + parent: Optional[str] = None, + quota_preference: Optional[resources.QuotaPreference] = None, + quota_preference_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Creates a new QuotaPreference that declares the + desired value for a quota. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = await client.create_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest, dict]]): + The request object. Message for creating a + QuotaPreference + parent (:class:`str`): + Required. Value for parent. + + Example: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference (:class:`google.cloud.cloudquotas_v1.types.QuotaPreference`): + Required. The resource being created + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference_id (:class:`str`): + Optional. Id of the requesting + object, must be unique under its parent. + If client does not set this field, the + service will generate one. 
+ + This corresponds to the ``quota_preference_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, quota_preference, quota_preference_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudquotas.CreateQuotaPreferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if quota_preference is not None: + request.quota_preference = quota_preference + if quota_preference_id is not None: + request.quota_preference_id = quota_preference_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_quota_preference, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_quota_preference( + self, + request: Optional[Union[cloudquotas.UpdateQuotaPreferenceRequest, dict]] = None, + *, + quota_preference: Optional[resources.QuotaPreference] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + async def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = await client.update_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest, dict]]): + The request object. Message for updating a + QuotaPreference + quota_preference (:class:`google.cloud.cloudquotas_v1.types.QuotaPreference`): + Required. The resource being updated + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the QuotaPreference resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([quota_preference, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudquotas.UpdateQuotaPreferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if quota_preference is not None: + request.quota_preference = quota_preference + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_quota_preference, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("quota_preference.name", request.quota_preference.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CloudQuotasAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudQuotasAsyncClient",) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py new file mode 100644 index 000000000000..3834e53291df --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/client.py @@ -0,0 +1,1222 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.cloudquotas_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.cloudquotas_v1.services.cloud_quotas import pagers +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + +from .transports.base import DEFAULT_CLIENT_INFO, CloudQuotasTransport +from .transports.grpc import CloudQuotasGrpcTransport +from .transports.grpc_asyncio import CloudQuotasGrpcAsyncIOTransport +from .transports.rest import CloudQuotasRestTransport + + +class CloudQuotasClientMeta(type): + """Metaclass for the CloudQuotas client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudQuotasTransport]] + _transport_registry["grpc"] = CloudQuotasGrpcTransport + _transport_registry["grpc_asyncio"] = CloudQuotasGrpcAsyncIOTransport + _transport_registry["rest"] = CloudQuotasRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudQuotasTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudQuotasClient(metaclass=CloudQuotasClientMeta): + """The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudquotas.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudQuotasClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudQuotasTransport: + """Returns the transport used by the client instance. + + Returns: + CloudQuotasTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def quota_info_path( + project: str, + location: str, + service: str, + quota_info: str, + ) -> str: + """Returns a fully-qualified quota_info string.""" + return "projects/{project}/locations/{location}/services/{service}/quotaInfos/{quota_info}".format( + project=project, + location=location, + service=service, + quota_info=quota_info, + ) + + @staticmethod + def parse_quota_info_path(path: str) -> Dict[str, str]: + """Parses a quota_info path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/quotaInfos/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def quota_preference_path( + project: str, + location: str, + quota_preference: str, + ) -> str: + """Returns a fully-qualified quota_preference string.""" + return "projects/{project}/locations/{location}/quotaPreferences/{quota_preference}".format( + project=project, + location=location, + quota_preference=quota_preference, + ) + + @staticmethod + def parse_quota_preference_path(path: str) -> Dict[str, str]: + """Parses a quota_preference path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/quotaPreferences/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def 
parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudQuotasTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud quotas client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudQuotasTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudQuotasTransport): + # transport is a CloudQuotasTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_quota_infos( + self, + request: Optional[Union[cloudquotas.ListQuotaInfosRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaInfosPager: + r"""Lists QuotaInfos of all quotas for a given project, + folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest, dict]): + The request object. Message for requesting list of + QuotaInfos + parent (str): + Required. Parent value of QuotaInfo resources. Listing + across different resource containers (such as + 'projects/-') is not allowed. + + Example names: + ``projects/123/locations/global/services/compute.googleapis.com`` + ``folders/234/locations/global/services/compute.googleapis.com`` + ``organizations/345/locations/global/services/compute.googleapis.com`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosPager: + Message for response to listing + QuotaInfos + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudquotas.ListQuotaInfosRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudquotas.ListQuotaInfosRequest): + request = cloudquotas.ListQuotaInfosRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_quota_infos] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQuotaInfosPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_quota_info( + self, + request: Optional[Union[cloudquotas.GetQuotaInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaInfo: + r"""Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest, dict]): + The request object. Message for getting a QuotaInfo + name (str): + Required. The resource name of the quota info. + + An example name: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaInfo: + QuotaInfo represents information + about a particular quota for a given + project, folder or organization. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudquotas.GetQuotaInfoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudquotas.GetQuotaInfoRequest): + request = cloudquotas.GetQuotaInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_quota_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_quota_preferences( + self, + request: Optional[Union[cloudquotas.ListQuotaPreferencesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQuotaPreferencesPager: + r"""Lists QuotaPreferences in a given project, folder or + organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest, dict]): + The request object. Message for requesting list of + QuotaPreferences + parent (str): + Required. Parent value of QuotaPreference resources. + Listing across different resource containers (such as + 'projects/-') is not allowed. + + When the value starts with 'folders' or 'organizations', + it lists the QuotaPreferences for org quotas in the + container. It does not list the QuotaPreferences in the + descendant projects of the container. + + Example parents: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesPager: + Message for response to listing + QuotaPreferences + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudquotas.ListQuotaPreferencesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudquotas.ListQuotaPreferencesRequest): + request = cloudquotas.ListQuotaPreferencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_quota_preferences] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQuotaPreferencesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_quota_preference( + self, + request: Optional[Union[cloudquotas.GetQuotaPreferenceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Gets details of a single QuotaPreference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest, dict]): + The request object. Message for getting a QuotaPreference + name (str): + Required. Name of the resource + + Example name: + ``projects/123/locations/global/quota_preferences/my-config-for-us-east1`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudquotas.GetQuotaPreferenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudquotas.GetQuotaPreferenceRequest): + request = cloudquotas.GetQuotaPreferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_quota_preference( + self, + request: Optional[Union[cloudquotas.CreateQuotaPreferenceRequest, dict]] = None, + *, + parent: Optional[str] = None, + quota_preference: Optional[resources.QuotaPreference] = None, + quota_preference_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Creates a new QuotaPreference that declares the + desired value for a quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = client.create_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest, dict]): + The request object. Message for creating a + QuotaPreference + parent (str): + Required. Value for parent. 
+ + Example: ``projects/123/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being created + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + quota_preference_id (str): + Optional. Id of the requesting + object, must be unique under its parent. + If client does not set this field, the + service will generate one. + + This corresponds to the ``quota_preference_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, quota_preference, quota_preference_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudquotas.CreateQuotaPreferenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudquotas.CreateQuotaPreferenceRequest): + request = cloudquotas.CreateQuotaPreferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if quota_preference is not None: + request.quota_preference = quota_preference + if quota_preference_id is not None: + request.quota_preference_id = quota_preference_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_quota_preference( + self, + request: Optional[Union[cloudquotas.UpdateQuotaPreferenceRequest, dict]] = None, + *, + quota_preference: Optional[resources.QuotaPreference] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudquotas_v1 + + def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = client.update_quota_preference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest, dict]): + The request object. Message for updating a + QuotaPreference + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being updated + This corresponds to the ``quota_preference`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the QuotaPreference resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.cloudquotas_v1.types.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([quota_preference, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudquotas.UpdateQuotaPreferenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudquotas.UpdateQuotaPreferenceRequest): + request = cloudquotas.UpdateQuotaPreferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if quota_preference is not None: + request.quota_preference = quota_preference + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_quota_preference] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("quota_preference.name", request.quota_preference.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "CloudQuotasClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudQuotasClient",) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py new file mode 100644 index 000000000000..8beb6b6d5497 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + + +class ListQuotaInfosPager: + """A pager for iterating through ``list_quota_infos`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` object, and + provides an ``__iter__`` method to iterate through its + ``quota_infos`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQuotaInfos`` requests and continue to iterate + through the ``quota_infos`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudquotas.ListQuotaInfosResponse], + request: cloudquotas.ListQuotaInfosRequest, + response: cloudquotas.ListQuotaInfosResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudquotas.ListQuotaInfosRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudquotas.ListQuotaInfosResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.QuotaInfo]: + for page in self.pages: + yield from page.quota_infos + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQuotaInfosAsyncPager: + """A pager for iterating through ``list_quota_infos`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``quota_infos`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQuotaInfos`` requests and continue to iterate + through the ``quota_infos`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudquotas.ListQuotaInfosResponse]], + request: cloudquotas.ListQuotaInfosRequest, + response: cloudquotas.ListQuotaInfosResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest): + The initial request object. 
+ response (google.cloud.cloudquotas_v1.types.ListQuotaInfosResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudquotas.ListQuotaInfosRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudquotas.ListQuotaInfosResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.QuotaInfo]: + async def async_generator(): + async for page in self.pages: + for response in page.quota_infos: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQuotaPreferencesPager: + """A pager for iterating through ``list_quota_preferences`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``quota_preferences`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQuotaPreferences`` requests and continue to iterate + through the ``quota_preferences`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., cloudquotas.ListQuotaPreferencesResponse], + request: cloudquotas.ListQuotaPreferencesRequest, + response: cloudquotas.ListQuotaPreferencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudquotas.ListQuotaPreferencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudquotas.ListQuotaPreferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.QuotaPreference]: + for page in self.pages: + yield from page.quota_preferences + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQuotaPreferencesAsyncPager: + """A pager for iterating through ``list_quota_preferences`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``quota_preferences`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListQuotaPreferences`` requests and continue to iterate + through the ``quota_preferences`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudquotas.ListQuotaPreferencesResponse]], + request: cloudquotas.ListQuotaPreferencesRequest, + response: cloudquotas.ListQuotaPreferencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest): + The initial request object. + response (google.cloud.cloudquotas_v1.types.ListQuotaPreferencesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudquotas.ListQuotaPreferencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudquotas.ListQuotaPreferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.QuotaPreference]: + async def async_generator(): + async for page in self.pages: + for response in page.quota_preferences: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py new file mode 100644 index 000000000000..f93b3269bbc2 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudQuotasTransport +from .grpc import CloudQuotasGrpcTransport +from .grpc_asyncio import CloudQuotasGrpcAsyncIOTransport +from .rest import CloudQuotasRestInterceptor, CloudQuotasRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudQuotasTransport]] +_transport_registry["grpc"] = CloudQuotasGrpcTransport +_transport_registry["grpc_asyncio"] = CloudQuotasGrpcAsyncIOTransport +_transport_registry["rest"] = CloudQuotasRestTransport + +__all__ = ( + "CloudQuotasTransport", + "CloudQuotasGrpcTransport", + "CloudQuotasGrpcAsyncIOTransport", + "CloudQuotasRestTransport", + "CloudQuotasRestInterceptor", +) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py new file mode 100644 index 000000000000..bfe7a26d5f4b --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/base.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.cloudquotas_v1 import gapic_version as package_version +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CloudQuotasTransport(abc.ABC): + """Abstract transport class for CloudQuotas.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudquotas.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_quota_infos: gapic_v1.method.wrap_method( + self.list_quota_infos, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_quota_info: gapic_v1.method.wrap_method( + self.get_quota_info, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_quota_preferences: gapic_v1.method.wrap_method( + self.list_quota_preferences, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_quota_preference: gapic_v1.method.wrap_method( + self.get_quota_preference, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_quota_preference: gapic_v1.method.wrap_method( + self.create_quota_preference, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_quota_preference: gapic_v1.method.wrap_method( + self.update_quota_preference, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def 
close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_quota_infos( + self, + ) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], + Union[ + cloudquotas.ListQuotaInfosResponse, + Awaitable[cloudquotas.ListQuotaInfosResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_quota_info( + self, + ) -> Callable[ + [cloudquotas.GetQuotaInfoRequest], + Union[resources.QuotaInfo, Awaitable[resources.QuotaInfo]], + ]: + raise NotImplementedError() + + @property + def list_quota_preferences( + self, + ) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + Union[ + cloudquotas.ListQuotaPreferencesResponse, + Awaitable[cloudquotas.ListQuotaPreferencesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_quota_preference( + self, + ) -> Callable[ + [cloudquotas.GetQuotaPreferenceRequest], + Union[resources.QuotaPreference, Awaitable[resources.QuotaPreference]], + ]: + raise NotImplementedError() + + @property + def create_quota_preference( + self, + ) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], + Union[resources.QuotaPreference, Awaitable[resources.QuotaPreference]], + ]: + raise NotImplementedError() + + @property + def update_quota_preference( + self, + ) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], + Union[resources.QuotaPreference, Awaitable[resources.QuotaPreference]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CloudQuotasTransport",) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py new file mode 100644 index 000000000000..f1cc251635ac --- /dev/null 
+++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + +from .base import DEFAULT_CLIENT_INFO, CloudQuotasTransport + + +class CloudQuotasGrpcTransport(CloudQuotasTransport): + """gRPC backend transport for CloudQuotas. + + The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudquotas.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudquotas.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_quota_infos( + self, + ) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], cloudquotas.ListQuotaInfosResponse + ]: + r"""Return a callable for the list quota infos method over gRPC. + + Lists QuotaInfos of all quotas for a given project, + folder or organization. + + Returns: + Callable[[~.ListQuotaInfosRequest], + ~.ListQuotaInfosResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_quota_infos" not in self._stubs: + self._stubs["list_quota_infos"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/ListQuotaInfos", + request_serializer=cloudquotas.ListQuotaInfosRequest.serialize, + response_deserializer=cloudquotas.ListQuotaInfosResponse.deserialize, + ) + return self._stubs["list_quota_infos"] + + @property + def get_quota_info( + self, + ) -> Callable[[cloudquotas.GetQuotaInfoRequest], resources.QuotaInfo]: + r"""Return a callable for the get quota info method over gRPC. + + Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + Returns: + Callable[[~.GetQuotaInfoRequest], + ~.QuotaInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_quota_info" not in self._stubs: + self._stubs["get_quota_info"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/GetQuotaInfo", + request_serializer=cloudquotas.GetQuotaInfoRequest.serialize, + response_deserializer=resources.QuotaInfo.deserialize, + ) + return self._stubs["get_quota_info"] + + @property + def list_quota_preferences( + self, + ) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + cloudquotas.ListQuotaPreferencesResponse, + ]: + r"""Return a callable for the list quota preferences method over gRPC. + + Lists QuotaPreferences in a given project, folder or + organization. + + Returns: + Callable[[~.ListQuotaPreferencesRequest], + ~.ListQuotaPreferencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_quota_preferences" not in self._stubs: + self._stubs["list_quota_preferences"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/ListQuotaPreferences", + request_serializer=cloudquotas.ListQuotaPreferencesRequest.serialize, + response_deserializer=cloudquotas.ListQuotaPreferencesResponse.deserialize, + ) + return self._stubs["list_quota_preferences"] + + @property + def get_quota_preference( + self, + ) -> Callable[[cloudquotas.GetQuotaPreferenceRequest], resources.QuotaPreference]: + r"""Return a callable for the get quota preference method over gRPC. + + Gets details of a single QuotaPreference. + + Returns: + Callable[[~.GetQuotaPreferenceRequest], + ~.QuotaPreference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_quota_preference" not in self._stubs: + self._stubs["get_quota_preference"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/GetQuotaPreference", + request_serializer=cloudquotas.GetQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs["get_quota_preference"] + + @property + def create_quota_preference( + self, + ) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], resources.QuotaPreference + ]: + r"""Return a callable for the create quota preference method over gRPC. + + Creates a new QuotaPreference that declares the + desired value for a quota. + + Returns: + Callable[[~.CreateQuotaPreferenceRequest], + ~.QuotaPreference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_quota_preference" not in self._stubs: + self._stubs["create_quota_preference"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/CreateQuotaPreference", + request_serializer=cloudquotas.CreateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs["create_quota_preference"] + + @property + def update_quota_preference( + self, + ) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], resources.QuotaPreference + ]: + r"""Return a callable for the update quota preference method over gRPC. + + Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. 
+ + Returns: + Callable[[~.UpdateQuotaPreferenceRequest], + ~.QuotaPreference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_quota_preference" not in self._stubs: + self._stubs["update_quota_preference"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/UpdateQuotaPreference", + request_serializer=cloudquotas.UpdateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs["update_quota_preference"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CloudQuotasGrpcTransport",) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py new file mode 100644 index 000000000000..bf9fa3b718e0 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/grpc_asyncio.py @@ -0,0 +1,421 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + +from .base import DEFAULT_CLIENT_INFO, CloudQuotasTransport +from .grpc import CloudQuotasGrpcTransport + + +class CloudQuotasGrpcAsyncIOTransport(CloudQuotasTransport): + """gRPC AsyncIO backend transport for CloudQuotas. + + The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudquotas.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudquotas.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_quota_infos( + self, + ) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], + Awaitable[cloudquotas.ListQuotaInfosResponse], + ]: + r"""Return a callable for the list quota infos method over gRPC. + + Lists QuotaInfos of all quotas for a given project, + folder or organization. 
+ + Returns: + Callable[[~.ListQuotaInfosRequest], + Awaitable[~.ListQuotaInfosResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_quota_infos" not in self._stubs: + self._stubs["list_quota_infos"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/ListQuotaInfos", + request_serializer=cloudquotas.ListQuotaInfosRequest.serialize, + response_deserializer=cloudquotas.ListQuotaInfosResponse.deserialize, + ) + return self._stubs["list_quota_infos"] + + @property + def get_quota_info( + self, + ) -> Callable[[cloudquotas.GetQuotaInfoRequest], Awaitable[resources.QuotaInfo]]: + r"""Return a callable for the get quota info method over gRPC. + + Retrieve the QuotaInfo of a quota for a project, + folder or organization. + + Returns: + Callable[[~.GetQuotaInfoRequest], + Awaitable[~.QuotaInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_quota_info" not in self._stubs: + self._stubs["get_quota_info"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/GetQuotaInfo", + request_serializer=cloudquotas.GetQuotaInfoRequest.serialize, + response_deserializer=resources.QuotaInfo.deserialize, + ) + return self._stubs["get_quota_info"] + + @property + def list_quota_preferences( + self, + ) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + Awaitable[cloudquotas.ListQuotaPreferencesResponse], + ]: + r"""Return a callable for the list quota preferences method over gRPC. + + Lists QuotaPreferences in a given project, folder or + organization. 
+ + Returns: + Callable[[~.ListQuotaPreferencesRequest], + Awaitable[~.ListQuotaPreferencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_quota_preferences" not in self._stubs: + self._stubs["list_quota_preferences"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/ListQuotaPreferences", + request_serializer=cloudquotas.ListQuotaPreferencesRequest.serialize, + response_deserializer=cloudquotas.ListQuotaPreferencesResponse.deserialize, + ) + return self._stubs["list_quota_preferences"] + + @property + def get_quota_preference( + self, + ) -> Callable[ + [cloudquotas.GetQuotaPreferenceRequest], Awaitable[resources.QuotaPreference] + ]: + r"""Return a callable for the get quota preference method over gRPC. + + Gets details of a single QuotaPreference. + + Returns: + Callable[[~.GetQuotaPreferenceRequest], + Awaitable[~.QuotaPreference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_quota_preference" not in self._stubs: + self._stubs["get_quota_preference"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/GetQuotaPreference", + request_serializer=cloudquotas.GetQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs["get_quota_preference"] + + @property + def create_quota_preference( + self, + ) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], Awaitable[resources.QuotaPreference] + ]: + r"""Return a callable for the create quota preference method over gRPC. 
+ + Creates a new QuotaPreference that declares the + desired value for a quota. + + Returns: + Callable[[~.CreateQuotaPreferenceRequest], + Awaitable[~.QuotaPreference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_quota_preference" not in self._stubs: + self._stubs["create_quota_preference"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/CreateQuotaPreference", + request_serializer=cloudquotas.CreateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs["create_quota_preference"] + + @property + def update_quota_preference( + self, + ) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], Awaitable[resources.QuotaPreference] + ]: + r"""Return a callable for the update quota preference method over gRPC. + + Updates the parameters of a single QuotaPreference. + It can updates the config in any states, not just the + ones pending approval. + + Returns: + Callable[[~.UpdateQuotaPreferenceRequest], + Awaitable[~.QuotaPreference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_quota_preference" not in self._stubs: + self._stubs["update_quota_preference"] = self.grpc_channel.unary_unary( + "/google.api.cloudquotas.v1.CloudQuotas/UpdateQuotaPreference", + request_serializer=cloudquotas.UpdateQuotaPreferenceRequest.serialize, + response_deserializer=resources.QuotaPreference.deserialize, + ) + return self._stubs["update_quota_preference"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("CloudQuotasGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py new file mode 100644 index 000000000000..6a50b9956bdf --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/services/cloud_quotas/transports/rest.py @@ -0,0 +1,1052 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + +from .base import CloudQuotasTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudQuotasRestInterceptor: + """Interceptor for CloudQuotas. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudQuotasRestTransport. + + .. 
code-block:: python + class MyCustomCloudQuotasInterceptor(CloudQuotasRestInterceptor): + def pre_create_quota_preference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_quota_preference(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_quota_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_quota_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_quota_preference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_quota_preference(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_quota_infos(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_quota_infos(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_quota_preferences(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_quota_preferences(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_quota_preference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_quota_preference(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudQuotasRestTransport(interceptor=MyCustomCloudQuotasInterceptor()) + client = CloudQuotasClient(transport=transport) + + + """ + + def pre_create_quota_preference( + self, + request: cloudquotas.CreateQuotaPreferenceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudquotas.CreateQuotaPreferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_quota_preference + + 
Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_create_quota_preference( + self, response: resources.QuotaPreference + ) -> resources.QuotaPreference: + """Post-rpc interceptor for create_quota_preference + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_get_quota_info( + self, + request: cloudquotas.GetQuotaInfoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudquotas.GetQuotaInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_quota_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_get_quota_info(self, response: resources.QuotaInfo) -> resources.QuotaInfo: + """Post-rpc interceptor for get_quota_info + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_get_quota_preference( + self, + request: cloudquotas.GetQuotaPreferenceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudquotas.GetQuotaPreferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_quota_preference + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_get_quota_preference( + self, response: resources.QuotaPreference + ) -> resources.QuotaPreference: + """Post-rpc interceptor for get_quota_preference + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. 
+ """ + return response + + def pre_list_quota_infos( + self, + request: cloudquotas.ListQuotaInfosRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudquotas.ListQuotaInfosRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_quota_infos + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_list_quota_infos( + self, response: cloudquotas.ListQuotaInfosResponse + ) -> cloudquotas.ListQuotaInfosResponse: + """Post-rpc interceptor for list_quota_infos + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_list_quota_preferences( + self, + request: cloudquotas.ListQuotaPreferencesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudquotas.ListQuotaPreferencesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_quota_preferences + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. + """ + return request, metadata + + def post_list_quota_preferences( + self, response: cloudquotas.ListQuotaPreferencesResponse + ) -> cloudquotas.ListQuotaPreferencesResponse: + """Post-rpc interceptor for list_quota_preferences + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + def pre_update_quota_preference( + self, + request: cloudquotas.UpdateQuotaPreferenceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudquotas.UpdateQuotaPreferenceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_quota_preference + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudQuotas server. 
+ """ + return request, metadata + + def post_update_quota_preference( + self, response: resources.QuotaPreference + ) -> resources.QuotaPreference: + """Post-rpc interceptor for update_quota_preference + + Override in a subclass to manipulate the response + after it is returned by the CloudQuotas server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudQuotasRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudQuotasRestInterceptor + + +class CloudQuotasRestTransport(CloudQuotasTransport): + """REST backend transport for CloudQuotas. + + The Cloud Quotas API is an infrastructure service for Google + Cloud that lets service consumers list and manage their resource + usage limits. + + - List/Get the metadata and current status of the quotas for a + service. + - Create/Update quota preferencess that declare the preferred + quota values. + - Check the status of a quota preference request. + - List/Get pending and historical quota preference. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudquotas.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudQuotasRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudQuotasRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateQuotaPreference(CloudQuotasRestStub): + def __hash__(self): + return hash("CreateQuotaPreference") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudquotas.CreateQuotaPreferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Call the create quota preference method over HTTP. + + Args: + request (~.cloudquotas.CreateQuotaPreferenceRequest): + The request object. Message for creating a + QuotaPreference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.resources.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/quotaPreferences", + "body": "quota_preference", + }, + { + "method": "post", + "uri": "/v1/{parent=folders/*/locations/*}/quotaPreferences", + "body": "quota_preference", + }, + { + "method": "post", + "uri": "/v1/{parent=organizations/*/locations/*}/quotaPreferences", + "body": "quota_preference", + }, + ] + request, metadata = self._interceptor.pre_create_quota_preference( + request, metadata + ) + pb_request = cloudquotas.CreateQuotaPreferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaPreference() + pb_resp = resources.QuotaPreference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_quota_preference(resp) + return resp + + class _GetQuotaInfo(CloudQuotasRestStub): + def __hash__(self): + return hash("GetQuotaInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudquotas.GetQuotaInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaInfo: + r"""Call the get quota info method over HTTP. + + Args: + request (~.cloudquotas.GetQuotaInfoRequest): + The request object. Message for getting a QuotaInfo + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.QuotaInfo: + QuotaInfo represents information + about a particular quota for a given + project, folder or organization. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/services/*/quotaInfos/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/services/*/quotaInfos/*}", + }, + { + "method": "get", + "uri": "/v1/{name=folders/*/locations/*/services/*/quotaInfos/*}", + }, + ] + request, metadata = self._interceptor.pre_get_quota_info(request, metadata) + pb_request = cloudquotas.GetQuotaInfoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaInfo() + pb_resp = resources.QuotaInfo.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_quota_info(resp) + return resp + + class _GetQuotaPreference(CloudQuotasRestStub): + def __hash__(self): + return hash("GetQuotaPreference") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudquotas.GetQuotaPreferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Call the get quota preference method over HTTP. + + Args: + request (~.cloudquotas.GetQuotaPreferenceRequest): + The request object. Message for getting a QuotaPreference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/quotaPreferences/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/quotaPreferences/*}", + }, + { + "method": "get", + "uri": "/v1/{name=folders/*/locations/*/quotaPreferences/*}", + }, + ] + request, metadata = self._interceptor.pre_get_quota_preference( + request, metadata + ) + pb_request = cloudquotas.GetQuotaPreferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaPreference() + pb_resp = resources.QuotaPreference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_quota_preference(resp) + return resp + + class _ListQuotaInfos(CloudQuotasRestStub): + def __hash__(self): + return hash("ListQuotaInfos") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudquotas.ListQuotaInfosRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudquotas.ListQuotaInfosResponse: + r"""Call the list quota infos method over HTTP. + + Args: + request (~.cloudquotas.ListQuotaInfosRequest): + The request object. Message for requesting list of + QuotaInfos + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloudquotas.ListQuotaInfosResponse: + Message for response to listing + QuotaInfos + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/services/*}/quotaInfos", + }, + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*/services/*}/quotaInfos", + }, + { + "method": "get", + "uri": "/v1/{parent=folders/*/locations/*/services/*}/quotaInfos", + }, + ] + request, metadata = self._interceptor.pre_list_quota_infos( + request, metadata + ) + pb_request = cloudquotas.ListQuotaInfosRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudquotas.ListQuotaInfosResponse() + pb_resp = cloudquotas.ListQuotaInfosResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_quota_infos(resp) + return resp + + class _ListQuotaPreferences(CloudQuotasRestStub): + def __hash__(self): + return hash("ListQuotaPreferences") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudquotas.ListQuotaPreferencesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudquotas.ListQuotaPreferencesResponse: + r"""Call the list quota preferences method over HTTP. + + Args: + request (~.cloudquotas.ListQuotaPreferencesRequest): + The request object. Message for requesting list of + QuotaPreferences + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloudquotas.ListQuotaPreferencesResponse: + Message for response to listing + QuotaPreferences + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/quotaPreferences", + }, + { + "method": "get", + "uri": "/v1/{parent=folders/*/locations/*}/quotaPreferences", + }, + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/quotaPreferences", + }, + ] + request, metadata = self._interceptor.pre_list_quota_preferences( + request, metadata + ) + pb_request = cloudquotas.ListQuotaPreferencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudquotas.ListQuotaPreferencesResponse() + pb_resp = cloudquotas.ListQuotaPreferencesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_quota_preferences(resp) + return resp + + class _UpdateQuotaPreference(CloudQuotasRestStub): + def __hash__(self): + return hash("UpdateQuotaPreference") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudquotas.UpdateQuotaPreferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.QuotaPreference: + r"""Call the update quota preference method over HTTP. + + Args: + request (~.cloudquotas.UpdateQuotaPreferenceRequest): + The request object. Message for updating a + QuotaPreference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.QuotaPreference: + QuotaPreference represents the + preferred quota configuration specified + for a project, folder or organization. + There is only one QuotaPreference + resource for a quota value targeting a + unique set of dimensions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{quota_preference.name=projects/*/locations/*/quotaPreferences/*}", + "body": "quota_preference", + }, + { + "method": "patch", + "uri": "/v1/{quota_preference.name=folders/*/locations/*/quotaPreferences/*}", + "body": "quota_preference", + }, + { + "method": "patch", + "uri": "/v1/{quota_preference.name=organizations/*/locations/*/quotaPreferences/*}", + "body": "quota_preference", + }, + ] + request, metadata = self._interceptor.pre_update_quota_preference( + request, metadata + ) + pb_request = cloudquotas.UpdateQuotaPreferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.QuotaPreference() + pb_resp = resources.QuotaPreference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_quota_preference(resp) + return resp + + @property + def create_quota_preference( + self, + ) -> Callable[ + [cloudquotas.CreateQuotaPreferenceRequest], resources.QuotaPreference + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateQuotaPreference(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_quota_info( + self, + ) -> Callable[[cloudquotas.GetQuotaInfoRequest], resources.QuotaInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetQuotaInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_quota_preference( + self, + ) -> Callable[[cloudquotas.GetQuotaPreferenceRequest], resources.QuotaPreference]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetQuotaPreference(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_quota_infos( + self, + ) -> Callable[ + [cloudquotas.ListQuotaInfosRequest], cloudquotas.ListQuotaInfosResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListQuotaInfos(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_quota_preferences( + self, + ) -> Callable[ + [cloudquotas.ListQuotaPreferencesRequest], + cloudquotas.ListQuotaPreferencesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListQuotaPreferences(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_quota_preference( + self, + ) -> Callable[ + [cloudquotas.UpdateQuotaPreferenceRequest], resources.QuotaPreference + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateQuotaPreference(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudQuotasRestTransport",) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/__init__.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/__init__.py new file mode 100644 index 000000000000..fd74b4b20d79 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudquotas import ( + CreateQuotaPreferenceRequest, + GetQuotaInfoRequest, + GetQuotaPreferenceRequest, + ListQuotaInfosRequest, + ListQuotaInfosResponse, + ListQuotaPreferencesRequest, + ListQuotaPreferencesResponse, + UpdateQuotaPreferenceRequest, +) +from .resources import ( + DimensionsInfo, + QuotaConfig, + QuotaDetails, + QuotaIncreaseEligibility, + QuotaInfo, + QuotaPreference, + QuotaSafetyCheck, +) + +__all__ = ( + "CreateQuotaPreferenceRequest", + "GetQuotaInfoRequest", + "GetQuotaPreferenceRequest", + "ListQuotaInfosRequest", + "ListQuotaInfosResponse", + "ListQuotaPreferencesRequest", + "ListQuotaPreferencesResponse", + "UpdateQuotaPreferenceRequest", + "DimensionsInfo", + "QuotaConfig", + "QuotaDetails", + "QuotaIncreaseEligibility", + "QuotaInfo", + "QuotaPreference", + "QuotaSafetyCheck", +) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/cloudquotas.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/cloudquotas.py new file mode 100644 index 000000000000..bac7587c3269 --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/cloudquotas.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.cloudquotas_v1.types import resources + +__protobuf__ = proto.module( + package="google.api.cloudquotas.v1", + manifest={ + "ListQuotaInfosRequest", + "ListQuotaInfosResponse", + "GetQuotaInfoRequest", + "ListQuotaPreferencesRequest", + "ListQuotaPreferencesResponse", + "GetQuotaPreferenceRequest", + "CreateQuotaPreferenceRequest", + "UpdateQuotaPreferenceRequest", + }, +) + + +class ListQuotaInfosRequest(proto.Message): + r"""Message for requesting list of QuotaInfos + + Attributes: + parent (str): + Required. Parent value of QuotaInfo resources. Listing + across different resource containers (such as 'projects/-') + is not allowed. + + Example names: + ``projects/123/locations/global/services/compute.googleapis.com`` + ``folders/234/locations/global/services/compute.googleapis.com`` + ``organizations/345/locations/global/services/compute.googleapis.com`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListQuotaInfosResponse(proto.Message): + r"""Message for response to listing QuotaInfos + + Attributes: + quota_infos (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaInfo]): + The list of QuotaInfo + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + quota_infos: MutableSequence[resources.QuotaInfo] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.QuotaInfo, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQuotaInfoRequest(proto.Message): + r"""Message for getting a QuotaInfo + + Attributes: + name (str): + Required. The resource name of the quota info. + + An example name: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListQuotaPreferencesRequest(proto.Message): + r"""Message for requesting list of QuotaPreferences + + Attributes: + parent (str): + Required. Parent value of QuotaPreference resources. Listing + across different resource containers (such as 'projects/-') + is not allowed. + + When the value starts with 'folders' or 'organizations', it + lists the QuotaPreferences for org quotas in the container. + It does not list the QuotaPreferences in the descendant + projects of the container. + + Example parents: ``projects/123/locations/global`` + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filter result QuotaPreferences by their state, + type, create/update time range. + + Example filters: + ``state=PENDING OR state=PENDING_PARTIALLY_GRANTED`` + ``state=PENDING OR state=PENDING_PARTIALLY_GRANTED AND creation_time>2022-12-03T10:30:00`` + + If no filter is provided, returns all pending quota + preferences. + order_by (str): + Optional. How to order of the results. By default, the + results are ordered by create time. 
+ + Example orders: ``type`` ``state, create_time`` + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListQuotaPreferencesResponse(proto.Message): + r"""Message for response to listing QuotaPreferences + + Attributes: + quota_preferences (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaPreference]): + The list of QuotaPreference + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + quota_preferences: MutableSequence[resources.QuotaPreference] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.QuotaPreference, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetQuotaPreferenceRequest(proto.Message): + r"""Message for getting a QuotaPreference + + Attributes: + name (str): + Required. Name of the resource + + Example name: + ``projects/123/locations/global/quota_preferences/my-config-for-us-east1`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateQuotaPreferenceRequest(proto.Message): + r"""Message for creating a QuotaPreference + + Attributes: + parent (str): + Required. Value for parent. + + Example: ``projects/123/locations/global`` + quota_preference_id (str): + Optional. Id of the requesting object, must + be unique under its parent. If client does not + set this field, the service will generate one. 
+ quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being created + ignore_safety_checks (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaSafetyCheck]): + The list of quota safety checks to be + ignored. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + quota_preference_id: str = proto.Field( + proto.STRING, + number=2, + ) + quota_preference: resources.QuotaPreference = proto.Field( + proto.MESSAGE, + number=3, + message=resources.QuotaPreference, + ) + ignore_safety_checks: MutableSequence[ + resources.QuotaSafetyCheck + ] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=resources.QuotaSafetyCheck, + ) + + +class UpdateQuotaPreferenceRequest(proto.Message): + r"""Message for updating a QuotaPreference + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the QuotaPreference resource by the update. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + quota_preference (google.cloud.cloudquotas_v1.types.QuotaPreference): + Required. The resource being updated + allow_missing (bool): + Optional. If set to true, and the quota preference is not + found, a new one will be created. In this situation, + ``update_mask`` is ignored. + validate_only (bool): + Optional. If set to true, validate the + request, but do not actually update. Note that a + request being valid does not mean that the + request is guaranteed to be fulfilled. + ignore_safety_checks (MutableSequence[google.cloud.cloudquotas_v1.types.QuotaSafetyCheck]): + The list of quota safety checks to be + ignored. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + quota_preference: resources.QuotaPreference = proto.Field( + proto.MESSAGE, + number=2, + message=resources.QuotaPreference, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + ignore_safety_checks: MutableSequence[ + resources.QuotaSafetyCheck + ] = proto.RepeatedField( + proto.ENUM, + number=5, + enum=resources.QuotaSafetyCheck, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/resources.py b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/resources.py new file mode 100644 index 000000000000..bc9bd9a1958a --- /dev/null +++ b/packages/google-cloud-cloudquotas/google/cloud/cloudquotas_v1/types/resources.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.api.cloudquotas.v1", + manifest={ + "QuotaSafetyCheck", + "QuotaInfo", + "QuotaIncreaseEligibility", + "QuotaPreference", + "QuotaConfig", + "DimensionsInfo", + "QuotaDetails", + }, +) + + +class QuotaSafetyCheck(proto.Enum): + r"""Enumerations of quota safety checks. + + Values: + QUOTA_SAFETY_CHECK_UNSPECIFIED (0): + Unspecified quota safety check. + QUOTA_DECREASE_BELOW_USAGE (1): + Validates that a quota mutation would not + cause the consumer's effective limit to be lower + than the consumer's quota usage. + QUOTA_DECREASE_PERCENTAGE_TOO_HIGH (2): + Validates that a quota mutation would not + cause the consumer's effective limit to decrease + by more than 10 percent. + """ + QUOTA_SAFETY_CHECK_UNSPECIFIED = 0 + QUOTA_DECREASE_BELOW_USAGE = 1 + QUOTA_DECREASE_PERCENTAGE_TOO_HIGH = 2 + + +class QuotaInfo(proto.Message): + r"""QuotaInfo represents information about a particular quota for + a given project, folder or organization. + + Attributes: + name (str): + Resource name of this QuotaInfo. The ID component following + "locations/" must be "global". Example: + ``projects/123/locations/global/services/compute.googleapis.com/quotaInfos/CpusPerProjectPerRegion`` + quota_id (str): + The id of the quota, which is unquie within the service. + Example: ``CpusPerProjectPerRegion`` + metric (str): + The metric of the quota. It specifies the resources + consumption the quota is defined for. Example: + ``compute.googleapis.com/cpus`` + service (str): + The name of the service in which the quota is defined. + Example: ``compute.googleapis.com`` + is_precise (bool): + Whether this is a precise quota. A precise + quota is tracked with absolute precision. 
In + contrast, an imprecise quota is not tracked with + precision. + refresh_interval (str): + The reset time interval for the quota. + Refresh interval applies to rate quota only. + Example: "minute" for per minute, "day" for per + day, or "10 seconds" for every 10 seconds. + container_type (google.cloud.cloudquotas_v1.types.QuotaInfo.ContainerType): + The container type of the QuotaInfo. + dimensions (MutableSequence[str]): + The dimensions the quota is defined on. + metric_display_name (str): + The display name of the quota metric + quota_display_name (str): + The display name of the quota. + metric_unit (str): + The unit in which the metric value is + reported, e.g., "MByte". + quota_increase_eligibility (google.cloud.cloudquotas_v1.types.QuotaIncreaseEligibility): + Whether it is eligible to request a higher + quota value for this quota. + is_fixed (bool): + Whether the quota value is fixed or + adjustable + dimensions_infos (MutableSequence[google.cloud.cloudquotas_v1.types.DimensionsInfo]): + The collection of dimensions info ordered by + their dimensions from more specific ones to less + specific ones. + is_concurrent (bool): + Whether the quota is a concurrent quota. + Concurrent quotas are enforced on the total + number of concurrent operations in flight at any + given time. + service_request_quota_uri (str): + URI to the page where the user can request more quotas for + the cloud service, such as + https://docs.google.com/spreadsheet/viewform?formkey=abc123&entry_0={email}&entry_1={id}. + Google Developers Console UI replace {email} with the + current user's e-mail, {id} with the current project number, + or organization ID with "organizations/" prefix. For + example, + https://docs.google.com/spreadsheet/viewform?formkey=abc123&entry_0=johndoe@gmail.com&entry_1=25463754, + or + https://docs.google.com/spreadsheet/viewform?formkey=abc123&entry_0=johndoe@gmail.com&entry_1=organizations/26474422. 
+ """ + + class ContainerType(proto.Enum): + r"""The enumeration of the types of a cloud resource container. + + Values: + CONTAINER_TYPE_UNSPECIFIED (0): + Unspecified container type. + PROJECT (1): + consumer project + FOLDER (2): + folder + ORGANIZATION (3): + organization + """ + CONTAINER_TYPE_UNSPECIFIED = 0 + PROJECT = 1 + FOLDER = 2 + ORGANIZATION = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + quota_id: str = proto.Field( + proto.STRING, + number=2, + ) + metric: str = proto.Field( + proto.STRING, + number=3, + ) + service: str = proto.Field( + proto.STRING, + number=4, + ) + is_precise: bool = proto.Field( + proto.BOOL, + number=5, + ) + refresh_interval: str = proto.Field( + proto.STRING, + number=6, + ) + container_type: ContainerType = proto.Field( + proto.ENUM, + number=7, + enum=ContainerType, + ) + dimensions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + metric_display_name: str = proto.Field( + proto.STRING, + number=9, + ) + quota_display_name: str = proto.Field( + proto.STRING, + number=10, + ) + metric_unit: str = proto.Field( + proto.STRING, + number=11, + ) + quota_increase_eligibility: "QuotaIncreaseEligibility" = proto.Field( + proto.MESSAGE, + number=12, + message="QuotaIncreaseEligibility", + ) + is_fixed: bool = proto.Field( + proto.BOOL, + number=13, + ) + dimensions_infos: MutableSequence["DimensionsInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="DimensionsInfo", + ) + is_concurrent: bool = proto.Field( + proto.BOOL, + number=15, + ) + service_request_quota_uri: str = proto.Field( + proto.STRING, + number=17, + ) + + +class QuotaIncreaseEligibility(proto.Message): + r"""Eligibility information regarding requesting increase + adjustment of a quota. + + Attributes: + is_eligible (bool): + Whether a higher quota value can be requested + for the quota. 
+ ineligibility_reason (google.cloud.cloudquotas_v1.types.QuotaIncreaseEligibility.IneligibilityReason): + The reason of why it is ineligible to request increased + value of the quota. If the is_eligible field is true, it + defaults to INELIGIBILITY_REASON_UNSPECIFIED. + """ + + class IneligibilityReason(proto.Enum): + r"""The enumeration of reasons when it is ineligible to request + increase adjustment. + + Values: + INELIGIBILITY_REASON_UNSPECIFIED (0): + Default value when is_eligible is true. + NO_VALID_BILLING_ACCOUNT (1): + The container is not linked with a valid + billing account. + OTHER (2): + Other reasons. + """ + INELIGIBILITY_REASON_UNSPECIFIED = 0 + NO_VALID_BILLING_ACCOUNT = 1 + OTHER = 2 + + is_eligible: bool = proto.Field( + proto.BOOL, + number=1, + ) + ineligibility_reason: IneligibilityReason = proto.Field( + proto.ENUM, + number=2, + enum=IneligibilityReason, + ) + + +class QuotaPreference(proto.Message): + r"""QuotaPreference represents the preferred quota configuration + specified for a project, folder or organization. There is only + one QuotaPreference resource for a quota value targeting a + unique set of dimensions. + + Attributes: + name (str): + Required except in the CREATE requests. The resource name of + the quota preference. The ID component following + "locations/" must be "global". Example: + ``projects/123/locations/global/quotaPreferences/my-config-for-us-east1`` + dimensions (MutableMapping[str, str]): + The dimensions that this quota preference applies to. The + key of the map entry is the name of a dimension, such as + "region", "zone", "network_id", and the value of the map + entry is the dimension value. + + If a dimension is missing from the map of dimensions, the + quota preference applies to all the dimension values except + for those that have other quota preferences configured for + the specific value. + + NOTE: QuotaPreferences can only be applied across all values + of "user" and "resource" dimension. 
Do not set values for + "user" or "resource" in the dimension map. + + Example: {"provider", "Foo Inc"} where "provider" is a + service specific dimension. + quota_config (google.cloud.cloudquotas_v1.types.QuotaConfig): + Required. Preferred quota configuration. + etag (str): + Optional. The current etag of the quota + preference. If an etag is provided on update and + does not match the current server's etag of the + quota preference, the request will be blocked + and an ABORTED error will be returned. See + https://google.aip.dev/134#etags for more + details on etags. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + service (str): + Required. The name of the service to which + the quota preference is applied. + quota_id (str): + Required. The id of the quota to which the quota preference + is applied. A quota name is unique in the service. Example: + ``CpusPerProjectPerRegion`` + reconciling (bool): + Output only. Is the quota preference pending + Google Cloud approval and fulfillment. + justification (str): + The reason / justification for this quota + preference. + contact_email (str): + Required. Input only. An email address that + can be used for quota related communication + between the Google Cloud and the user in case + the Google Cloud needs further information to + make a decision on whether the user preferred + quota can be granted. + + The Google account for the email address must + have quota update permission for the project, + folder or organization this quota preference is + for. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + quota_config: "QuotaConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="QuotaConfig", + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + service: str = proto.Field( + proto.STRING, + number=7, + ) + quota_id: str = proto.Field( + proto.STRING, + number=8, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=10, + ) + justification: str = proto.Field( + proto.STRING, + number=11, + ) + contact_email: str = proto.Field( + proto.STRING, + number=12, + ) + + +class QuotaConfig(proto.Message): + r"""The preferred quota configuration. + + Attributes: + preferred_value (int): + Required. The preferred value. Must be + greater than or equal to -1. If set to -1, it + means the value is "unlimited". + state_detail (str): + Output only. Optional details about the state + of this quota preference. + granted_value (google.protobuf.wrappers_pb2.Int64Value): + Output only. Granted quota value. + trace_id (str): + Output only. The trace id that the Google + Cloud uses to provision the requested quota. + This trace id may be used by the client to + contact Cloud support to track the state of a + quota preference request. The trace id is only + produced for increase requests and is unique for + each request. The quota decrease requests do not + have a trace id. + annotations (MutableMapping[str, str]): + The annotations map for clients to store + small amounts of arbitrary data. Do not put PII + or other sensitive information here. 
See + https://google.aip.dev/128#annotations + request_origin (google.cloud.cloudquotas_v1.types.QuotaConfig.Origin): + Output only. The origin of the quota + preference request. + """ + + class Origin(proto.Enum): + r"""The enumeration of the origins of quota preference requests. + + Values: + ORIGIN_UNSPECIFIED (0): + The unspecified value. + CLOUD_CONSOLE (1): + Created through Cloud Console. + AUTO_ADJUSTER (2): + Generated by automatic quota adjustment. + """ + ORIGIN_UNSPECIFIED = 0 + CLOUD_CONSOLE = 1 + AUTO_ADJUSTER = 2 + + preferred_value: int = proto.Field( + proto.INT64, + number=1, + ) + state_detail: str = proto.Field( + proto.STRING, + number=2, + ) + granted_value: wrappers_pb2.Int64Value = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.Int64Value, + ) + trace_id: str = proto.Field( + proto.STRING, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + request_origin: Origin = proto.Field( + proto.ENUM, + number=6, + enum=Origin, + ) + + +class DimensionsInfo(proto.Message): + r"""The detailed quota information such as effective quota value + for a combination of dimensions. + + Attributes: + dimensions (MutableMapping[str, str]): + The map of dimensions for this dimensions + info. The key of a map entry is "region", "zone" + or the name of a service specific dimension, and + the value of a map entry is the value of the + dimension. If a dimension does not appear in + the map of dimensions, the dimensions info + applies to all the dimension values except for + those that have another DimenisonInfo instance + configured for the specific value. + Example: {"provider" : "Foo Inc"} where + "provider" is a service specific dimension of a + quota. + details (google.cloud.cloudquotas_v1.types.QuotaDetails): + Quota details for the specified dimensions. + applicable_locations (MutableSequence[str]): + The applicable regions or zones of this dimensions info. 
The + field will be set to ['global'] for quotas that are not per + region or per zone. Otherwise, it will be set to the list of + locations this dimension info is applicable to. + """ + + dimensions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + details: "QuotaDetails" = proto.Field( + proto.MESSAGE, + number=2, + message="QuotaDetails", + ) + applicable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class QuotaDetails(proto.Message): + r"""The quota details for a map of dimensions. + + Attributes: + value (int): + The value currently in effect and being + enforced. + """ + + value: int = proto.Field( + proto.INT64, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-cloudquotas/mypy.ini b/packages/google-cloud-cloudquotas/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-cloudquotas/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-cloudquotas/noxfile.py b/packages/google-cloud-cloudquotas/noxfile.py new file mode 100644 index 000000000000..7d3551347c78 --- /dev/null +++ b/packages/google-cloud-cloudquotas/noxfile.py @@ -0,0 +1,410 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + 
session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py new file mode 100644 index 000000000000..bf17ce7ad1d1 --- /dev/null +++ 
b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = await client.create_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_async] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py new file mode 100644 index 000000000000..185d1d32c328 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_create_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.CreateQuotaPreferenceRequest( + parent="parent_value", + quota_preference=quota_preference, + ) + + # Make the request + response = client.create_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_sync] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py new file mode 100644 index 
000000000000..4a24a3f08c1f --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_get_quota_info(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_info(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_async] diff --git a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py similarity index 75% rename from packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_async.py rename to packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py index 1c86ed30f6a2..0b5f8f3d848f 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_async.py +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for BufferTask +# Snippet for GetQuotaInfo # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-tasks +# python3 -m pip install google-cloud-quotas -# [START cloudtasks_v2beta2_generated_CloudTasks_BufferTask_async] +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import tasks_v2beta2 +from google.cloud import cloudquotas_v1 -async def sample_buffer_task(): +def sample_get_quota_info(): # Create a client - client = tasks_v2beta2.CloudTasksAsyncClient() + client = cloudquotas_v1.CloudQuotasClient() # Initialize request argument(s) - request = tasks_v2beta2.BufferTaskRequest( - queue="queue_value", + request = cloudquotas_v1.GetQuotaInfoRequest( + name="name_value", ) # Make the request - response = await client.buffer_task(request=request) + response = client.get_quota_info(request=request) # Handle the response print(response) -# [END cloudtasks_v2beta2_generated_CloudTasks_BufferTask_async] +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_sync] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py new file mode 100644 index 000000000000..9be99fb77802 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_async] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py new file mode 100644 index 000000000000..08d035c9368a --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_get_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.GetQuotaPreferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_sync] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py new file mode 100644 index 000000000000..c0a2df8f7fd1 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaInfos +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_async] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py new file mode 100644 index 000000000000..27caa8681dbf --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaInfos +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_list_quota_infos(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaInfosRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_infos(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_sync] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py new file mode 100644 index 000000000000..3390a238f795 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_async] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py new file mode 100644 index 000000000000..7e7c209ca407 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListQuotaPreferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_list_quota_preferences(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + request = cloudquotas_v1.ListQuotaPreferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_quota_preferences(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_sync] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py new file mode 100644 index 000000000000..89bdb11147e2 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +async def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasAsyncClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = await client.update_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_async] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py new file mode 100644 index 000000000000..c3068bff3d18 --- /dev/null +++ b/packages/google-cloud-cloudquotas/samples/generated_samples/cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateQuotaPreference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-quotas + + +# [START cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudquotas_v1 + + +def sample_update_quota_preference(): + # Create a client + client = cloudquotas_v1.CloudQuotasClient() + + # Initialize request argument(s) + quota_preference = cloudquotas_v1.QuotaPreference() + quota_preference.quota_config.preferred_value = 1595 + quota_preference.service = "service_value" + quota_preference.quota_id = "quota_id_value" + quota_preference.contact_email = "contact_email_value" + + request = cloudquotas_v1.UpdateQuotaPreferenceRequest( + quota_preference=quota_preference, + ) + + # Make the request + response = client.update_quota_preference(request=request) + + # Handle the response + print(response) + +# [END cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_sync] diff --git a/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json b/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json new file mode 100644 index 000000000000..1639a7394e94 --- /dev/null +++ 
b/packages/google-cloud-cloudquotas/samples/generated_samples/snippet_metadata_google.api.cloudquotas.v1.json @@ -0,0 +1,1005 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.api.cloudquotas.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-quotas", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.create_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.CreateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "CreateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "quota_preference_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "create_quota_preference" + }, + "description": "Sample for CreateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.create_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.CreateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "CreateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.CreateQuotaPreferenceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "quota_preference_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "create_quota_preference" + }, + "description": "Sample for CreateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_CreateQuotaPreference_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, 
+ "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_create_quota_preference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.get_quota_info", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaInfo", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaInfo", + "shortName": "get_quota_info" + }, + "description": "Sample for GetQuotaInfo", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": 
"google.cloud.cloudquotas_v1.CloudQuotasClient.get_quota_info", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaInfo", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaInfo", + "shortName": "get_quota_info" + }, + "description": "Sample for GetQuotaInfo", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.get_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaPreference" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "get_quota_preference" + }, + "description": "Sample for GetQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.get_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.GetQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "GetQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.GetQuotaPreferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "get_quota_preference" + }, + "description": "Sample for GetQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_GetQuotaPreference_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_get_quota_preference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.list_quota_infos", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaInfos", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaInfos" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosAsyncPager", + "shortName": "list_quota_infos" + }, + "description": "Sample for ListQuotaInfos", + "file": 
"cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.list_quota_infos", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaInfos", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaInfos" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaInfosRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaInfosPager", + "shortName": "list_quota_infos" + }, + "description": "Sample for ListQuotaInfos", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaInfos_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_infos_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.list_quota_preferences", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaPreferences", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesAsyncPager", + "shortName": "list_quota_preferences" + }, + "description": "Sample for ListQuotaPreferences", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.list_quota_preferences", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.ListQuotaPreferences", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "ListQuotaPreferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.ListQuotaPreferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.services.cloud_quotas.pagers.ListQuotaPreferencesPager", + "shortName": "list_quota_preferences" + }, + "description": "Sample for ListQuotaPreferences", + "file": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_ListQuotaPreferences_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_list_quota_preferences_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient", + "shortName": "CloudQuotasAsyncClient" + }, + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasAsyncClient.update_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.UpdateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "UpdateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "update_quota_preference" + }, + "description": "Sample for UpdateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient", + "shortName": "CloudQuotasClient" + }, + 
"fullName": "google.cloud.cloudquotas_v1.CloudQuotasClient.update_quota_preference", + "method": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas.UpdateQuotaPreference", + "service": { + "fullName": "google.api.cloudquotas.v1.CloudQuotas", + "shortName": "CloudQuotas" + }, + "shortName": "UpdateQuotaPreference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudquotas_v1.types.UpdateQuotaPreferenceRequest" + }, + { + "name": "quota_preference", + "type": "google.cloud.cloudquotas_v1.types.QuotaPreference" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.cloudquotas_v1.types.QuotaPreference", + "shortName": "update_quota_preference" + }, + "description": "Sample for UpdateQuotaPreference", + "file": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudquotas_v1_generated_CloudQuotas_UpdateQuotaPreference_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudquotas_v1_generated_cloud_quotas_update_quota_preference_sync.py" + } + ] +} diff --git a/packages/google-cloud-cloudquotas/scripts/decrypt-secrets.sh b/packages/google-cloud-cloudquotas/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-cloudquotas/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ 
+#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-cloudquotas/scripts/fixup_cloudquotas_v1_keywords.py b/packages/google-cloud-cloudquotas/scripts/fixup_cloudquotas_v1_keywords.py new file mode 100644 index 000000000000..926e25526a74 --- /dev/null +++ b/packages/google-cloud-cloudquotas/scripts/fixup_cloudquotas_v1_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class cloudquotasCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_quota_preference': ('parent', 'quota_preference', 'quota_preference_id', 'ignore_safety_checks', ), + 'get_quota_info': ('name', ), + 'get_quota_preference': ('name', ), + 'list_quota_infos': ('parent', 'page_size', 'page_token', ), + 'list_quota_preferences': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_quota_preference': ('quota_preference', 'update_mask', 'allow_missing', 'validate_only', 'ignore_safety_checks', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=cloudquotasCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the cloudquotas client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-cloudquotas/setup.py b/packages/google-cloud-cloudquotas/setup.py new file mode 100644 index 000000000000..c1c5cce2c8d8 --- /dev/null +++ b/packages/google-cloud-cloudquotas/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-quotas" + + +description = "Google Cloud Quotas API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/cloudquotas/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-quotas" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, 
+ classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-cloudquotas/testing/.gitignore b/packages/google-cloud-cloudquotas/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-cloudquotas/testing/constraints-3.10.txt b/packages/google-cloud-cloudquotas/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-cloudquotas/testing/constraints-3.11.txt b/packages/google-cloud-cloudquotas/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-cloudquotas/testing/constraints-3.12.txt b/packages/google-cloud-cloudquotas/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-cloudquotas/testing/constraints-3.7.txt b/packages/google-cloud-cloudquotas/testing/constraints-3.7.txt new file mode 100644 index 000000000000..185f7d366c2f --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-cloud-cloudquotas/testing/constraints-3.8.txt b/packages/google-cloud-cloudquotas/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-cloudquotas/testing/constraints-3.9.txt b/packages/google-cloud-cloudquotas/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-cloudquotas/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-cloudquotas/tests/__init__.py b/packages/google-cloud-cloudquotas/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-cloudquotas/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-cloudquotas/tests/unit/__init__.py b/packages/google-cloud-cloudquotas/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-cloudquotas/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-cloudquotas/tests/unit/gapic/__init__.py b/packages/google-cloud-cloudquotas/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-cloudquotas/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/__init__.py b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py new file mode 100644 index 000000000000..45d00362a9eb --- /dev/null +++ b/packages/google-cloud-cloudquotas/tests/unit/gapic/cloudquotas_v1/test_cloud_quotas.py @@ -0,0 +1,5560 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.cloudquotas_v1.services.cloud_quotas import ( + CloudQuotasAsyncClient, + CloudQuotasClient, + pagers, + transports, +) +from google.cloud.cloudquotas_v1.types import cloudquotas, resources + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudQuotasClient._get_default_mtls_endpoint(None) is None + assert ( + CloudQuotasClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudQuotasClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudQuotasClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudQuotasClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CloudQuotasClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudQuotasClient, "grpc"), + (CloudQuotasAsyncClient, "grpc_asyncio"), + (CloudQuotasClient, "rest"), + ], +) +def test_cloud_quotas_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudquotas.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudquotas.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CloudQuotasGrpcTransport, "grpc"), + 
(transports.CloudQuotasGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudQuotasRestTransport, "rest"), + ], +) +def test_cloud_quotas_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudQuotasClient, "grpc"), + (CloudQuotasAsyncClient, "grpc_asyncio"), + (CloudQuotasClient, "rest"), + ], +) +def test_cloud_quotas_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudquotas.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudquotas.googleapis.com" + ) + + +def test_cloud_quotas_client_get_transport_class(): + transport = CloudQuotasClient.get_transport_class() + available_transports = [ + transports.CloudQuotasGrpcTransport, + 
transports.CloudQuotasRestTransport, + ] + assert transport in available_transports + + transport = CloudQuotasClient.get_transport_class("grpc") + assert transport == transports.CloudQuotasGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc"), + ( + CloudQuotasAsyncClient, + transports.CloudQuotasGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest"), + ], +) +@mock.patch.object( + CloudQuotasClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudQuotasClient) +) +@mock.patch.object( + CloudQuotasAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudQuotasAsyncClient), +) +def test_cloud_quotas_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudQuotasClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudQuotasClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", "true"), + ( + CloudQuotasAsyncClient, + transports.CloudQuotasGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudQuotasClient, 
transports.CloudQuotasGrpcTransport, "grpc", "false"), + ( + CloudQuotasAsyncClient, + transports.CloudQuotasGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest", "true"), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudQuotasClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudQuotasClient) +) +@mock.patch.object( + CloudQuotasAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudQuotasAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_quotas_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CloudQuotasClient, CloudQuotasAsyncClient]) +@mock.patch.object( + CloudQuotasClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudQuotasClient) +) +@mock.patch.object( + CloudQuotasAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudQuotasAsyncClient), +) +def test_cloud_quotas_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc"), + ( + CloudQuotasAsyncClient, + transports.CloudQuotasGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest"), + ], +) +def test_cloud_quotas_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", grpc_helpers), + ( + CloudQuotasAsyncClient, + transports.CloudQuotasGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudQuotasClient, transports.CloudQuotasRestTransport, "rest", None), + ], +) +def test_cloud_quotas_client_client_options_credentials_file( + client_class, transport_class, 
transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cloud_quotas_client_client_options_from_dict(): + with mock.patch( + "google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudQuotasClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport, "grpc", grpc_helpers), + ( + CloudQuotasAsyncClient, + transports.CloudQuotasGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_quotas_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudquotas.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudquotas.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.ListQuotaInfosRequest, + dict, + ], +) +def test_list_quota_infos(request_type, transport: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaInfosResponse( + next_page_token="next_page_token_value", + ) + response = client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaInfosRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaInfosPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_quota_infos_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + client.list_quota_infos() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaInfosRequest() + + +@pytest.mark.asyncio +async def test_list_quota_infos_async( + transport: str = "grpc_asyncio", request_type=cloudquotas.ListQuotaInfosRequest +): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudquotas.ListQuotaInfosResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaInfosRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaInfosAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_quota_infos_async_from_dict(): + await test_list_quota_infos_async(request_type=dict) + + +def test_list_quota_infos_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.ListQuotaInfosRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + call.return_value = cloudquotas.ListQuotaInfosResponse() + client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_quota_infos_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.ListQuotaInfosRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudquotas.ListQuotaInfosResponse() + ) + await client.list_quota_infos(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_quota_infos_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaInfosResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_quota_infos( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_quota_infos_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_infos( + cloudquotas.ListQuotaInfosRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_quota_infos_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaInfosResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudquotas.ListQuotaInfosResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_quota_infos( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_quota_infos_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_quota_infos( + cloudquotas.ListQuotaInfosRequest(), + parent="parent_value", + ) + + +def test_list_quota_infos_pager(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token="def", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_quota_infos(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaInfo) for i in results) + + +def test_list_quota_infos_pages(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_quota_infos), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token="def", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + pages = list(client.list_quota_infos(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_quota_infos_async_pager(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token="def", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_quota_infos( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.QuotaInfo) for i in responses) + + +@pytest.mark.asyncio +async def test_list_quota_infos_async_pages(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_infos), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token="def", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_quota_infos(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.GetQuotaInfoRequest, + dict, + ], +) +def test_get_quota_info(request_type, transport: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.QuotaInfo( + name="name_value", + quota_id="quota_id_value", + metric="metric_value", + service="service_value", + is_precise=True, + refresh_interval="refresh_interval_value", + container_type=resources.QuotaInfo.ContainerType.PROJECT, + dimensions=["dimensions_value"], + metric_display_name="metric_display_name_value", + quota_display_name="quota_display_name_value", + metric_unit="metric_unit_value", + is_fixed=True, + is_concurrent=True, + service_request_quota_uri="service_request_quota_uri_value", + ) + response = client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaInfo) + assert response.name == "name_value" + assert response.quota_id == "quota_id_value" + assert response.metric == "metric_value" + assert response.service == "service_value" + assert response.is_precise is True + assert response.refresh_interval == "refresh_interval_value" + assert response.container_type == resources.QuotaInfo.ContainerType.PROJECT + assert response.dimensions == ["dimensions_value"] + assert response.metric_display_name == "metric_display_name_value" + assert response.quota_display_name == "quota_display_name_value" + assert response.metric_unit == "metric_unit_value" + assert response.is_fixed is True + assert response.is_concurrent is True + assert response.service_request_quota_uri == "service_request_quota_uri_value" + + +def test_get_quota_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + client.get_quota_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaInfoRequest() + + +@pytest.mark.asyncio +async def test_get_quota_info_async( + transport: str = "grpc_asyncio", request_type=cloudquotas.GetQuotaInfoRequest +): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaInfo( + name="name_value", + quota_id="quota_id_value", + metric="metric_value", + service="service_value", + is_precise=True, + refresh_interval="refresh_interval_value", + container_type=resources.QuotaInfo.ContainerType.PROJECT, + dimensions=["dimensions_value"], + metric_display_name="metric_display_name_value", + quota_display_name="quota_display_name_value", + metric_unit="metric_unit_value", + is_fixed=True, + is_concurrent=True, + service_request_quota_uri="service_request_quota_uri_value", + ) + ) + response = await client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaInfoRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaInfo) + assert response.name == "name_value" + assert response.quota_id == "quota_id_value" + assert response.metric == "metric_value" + assert response.service == "service_value" + assert response.is_precise is True + assert response.refresh_interval == "refresh_interval_value" + assert response.container_type == resources.QuotaInfo.ContainerType.PROJECT + assert response.dimensions == ["dimensions_value"] + assert response.metric_display_name == "metric_display_name_value" + assert response.quota_display_name == "quota_display_name_value" + assert response.metric_unit == "metric_unit_value" + assert response.is_fixed is True + assert response.is_concurrent is True + assert response.service_request_quota_uri == "service_request_quota_uri_value" + + +@pytest.mark.asyncio +async def test_get_quota_info_async_from_dict(): + await test_get_quota_info_async(request_type=dict) + + +def test_get_quota_info_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaInfoRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + call.return_value = resources.QuotaInfo() + client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_quota_info_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaInfoRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaInfo()) + await client.get_quota_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_quota_info_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_quota_info( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_quota_info_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_quota_info( + cloudquotas.GetQuotaInfoRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_quota_info_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_quota_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaInfo() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.QuotaInfo()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_quota_info( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_quota_info_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_quota_info( + cloudquotas.GetQuotaInfoRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.ListQuotaPreferencesRequest, + dict, + ], +) +def test_list_quota_preferences(request_type, transport: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaPreferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaPreferencesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaPreferencesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_quota_preferences_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + client.list_quota_preferences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaPreferencesRequest() + + +@pytest.mark.asyncio +async def test_list_quota_preferences_async( + transport: str = "grpc_asyncio", + request_type=cloudquotas.ListQuotaPreferencesRequest, +): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudquotas.ListQuotaPreferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.ListQuotaPreferencesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaPreferencesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_quota_preferences_async_from_dict(): + await test_list_quota_preferences_async(request_type=dict) + + +def test_list_quota_preferences_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = cloudquotas.ListQuotaPreferencesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + call.return_value = cloudquotas.ListQuotaPreferencesResponse() + client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_quota_preferences_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.ListQuotaPreferencesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudquotas.ListQuotaPreferencesResponse() + ) + await client.list_quota_preferences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_quota_preferences_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaPreferencesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_quota_preferences( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_quota_preferences_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_preferences( + cloudquotas.ListQuotaPreferencesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_quota_preferences_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudquotas.ListQuotaPreferencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudquotas.ListQuotaPreferencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_quota_preferences( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_quota_preferences_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_quota_preferences( + cloudquotas.ListQuotaPreferencesRequest(), + parent="parent_value", + ) + + +def test_list_quota_preferences_pager(transport_name: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token="def", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_quota_preferences(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaPreference) for i in results) + + +def test_list_quota_preferences_pages(transport_name: str = "grpc"): + client = 
CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token="def", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + pages = list(client.list_quota_preferences(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_quota_preferences_async_pager(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token="def", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_quota_preferences( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.QuotaPreference) for i in responses) + + +@pytest.mark.asyncio +async def test_list_quota_preferences_async_pages(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_quota_preferences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token="def", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_quota_preferences(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.GetQuotaPreferenceRequest, + dict, + ], +) +def test_get_quota_preference(request_type, transport: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + response = client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaPreferenceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +def test_get_quota_preference_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + client.get_quota_preference() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaPreferenceRequest() + + +@pytest.mark.asyncio +async def test_get_quota_preference_async( + transport: str = "grpc_asyncio", request_type=cloudquotas.GetQuotaPreferenceRequest +): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + ) + response = await client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.GetQuotaPreferenceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +@pytest.mark.asyncio +async def test_get_quota_preference_async_from_dict(): + await test_get_quota_preference_async(request_type=dict) + + +def test_get_quota_preference_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaPreferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + call.return_value = resources.QuotaPreference() + client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_quota_preference_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.GetQuotaPreferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference() + ) + await client.get_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_quota_preference_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.QuotaPreference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_quota_preference( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_quota_preference_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_quota_preference( + cloudquotas.GetQuotaPreferenceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_quota_preference_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_quota_preference( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_quota_preference_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_quota_preference( + cloudquotas.GetQuotaPreferenceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.CreateQuotaPreferenceRequest, + dict, + ], +) +def test_create_quota_preference(request_type, transport: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + response = client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.CreateQuotaPreferenceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +def test_create_quota_preference_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + client.create_quota_preference() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.CreateQuotaPreferenceRequest() + + +@pytest.mark.asyncio +async def test_create_quota_preference_async( + transport: str = "grpc_asyncio", + request_type=cloudquotas.CreateQuotaPreferenceRequest, +): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + ) + response = await client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.CreateQuotaPreferenceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +@pytest.mark.asyncio +async def test_create_quota_preference_async_from_dict(): + await test_create_quota_preference_async(request_type=dict) + + +def test_create_quota_preference_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.CreateQuotaPreferenceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + call.return_value = resources.QuotaPreference() + client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_quota_preference_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.CreateQuotaPreferenceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference() + ) + await client.create_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_quota_preference_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_quota_preference( + parent="parent_value", + quota_preference=resources.QuotaPreference(name="name_value"), + quota_preference_id="quota_preference_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name="name_value") + assert arg == mock_val + arg = args[0].quota_preference_id + mock_val = "quota_preference_id_value" + assert arg == mock_val + + +def test_create_quota_preference_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_quota_preference( + cloudquotas.CreateQuotaPreferenceRequest(), + parent="parent_value", + quota_preference=resources.QuotaPreference(name="name_value"), + quota_preference_id="quota_preference_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_quota_preference_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_quota_preference( + parent="parent_value", + quota_preference=resources.QuotaPreference(name="name_value"), + quota_preference_id="quota_preference_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name="name_value") + assert arg == mock_val + arg = args[0].quota_preference_id + mock_val = "quota_preference_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_quota_preference_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_quota_preference( + cloudquotas.CreateQuotaPreferenceRequest(), + parent="parent_value", + quota_preference=resources.QuotaPreference(name="name_value"), + quota_preference_id="quota_preference_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.UpdateQuotaPreferenceRequest, + dict, + ], +) +def test_update_quota_preference(request_type, transport: str = "grpc"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + response = client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.UpdateQuotaPreferenceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +def test_update_quota_preference_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + client.update_quota_preference() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.UpdateQuotaPreferenceRequest() + + +@pytest.mark.asyncio +async def test_update_quota_preference_async( + transport: str = "grpc_asyncio", + request_type=cloudquotas.UpdateQuotaPreferenceRequest, +): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + ) + response = await client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudquotas.UpdateQuotaPreferenceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +@pytest.mark.asyncio +async def test_update_quota_preference_async_from_dict(): + await test_update_quota_preference_async(request_type=dict) + + +def test_update_quota_preference_field_headers(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.UpdateQuotaPreferenceRequest() + + request.quota_preference.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + call.return_value = resources.QuotaPreference() + client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "quota_preference.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_quota_preference_field_headers_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudquotas.UpdateQuotaPreferenceRequest() + + request.quota_preference.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference() + ) + await client.update_quota_preference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "quota_preference.name=name_value", + ) in kw["metadata"] + + +def test_update_quota_preference_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_quota_preference( + quota_preference=resources.QuotaPreference(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_quota_preference_flattened_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_quota_preference( + cloudquotas.UpdateQuotaPreferenceRequest(), + quota_preference=resources.QuotaPreference(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_quota_preference_flattened_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_quota_preference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.QuotaPreference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.QuotaPreference() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_quota_preference( + quota_preference=resources.QuotaPreference(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].quota_preference + mock_val = resources.QuotaPreference(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_quota_preference_flattened_error_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_quota_preference( + cloudquotas.UpdateQuotaPreferenceRequest(), + quota_preference=resources.QuotaPreference(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.ListQuotaInfosRequest, + dict, + ], +) +def test_list_quota_infos_rest(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/services/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaInfosResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaInfosResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_quota_infos(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaInfosPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_quota_infos_rest_required_fields( + request_type=cloudquotas.ListQuotaInfosRequest, +): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_quota_infos._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_quota_infos._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudquotas.ListQuotaInfosResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaInfosResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_quota_infos(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_quota_infos_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_quota_infos._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_quota_infos_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudQuotasRestInterceptor, "post_list_quota_infos" + ) as post, mock.patch.object( + transports.CloudQuotasRestInterceptor, "pre_list_quota_infos" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.ListQuotaInfosRequest.pb( + cloudquotas.ListQuotaInfosRequest() + ) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudquotas.ListQuotaInfosResponse.to_json( + cloudquotas.ListQuotaInfosResponse() + ) + + request = cloudquotas.ListQuotaInfosRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudquotas.ListQuotaInfosResponse() + + client.list_quota_infos( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_quota_infos_rest_bad_request( + transport: str = "rest", request_type=cloudquotas.ListQuotaInfosRequest +): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/services/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_quota_infos(request) + + +def test_list_quota_infos_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaInfosResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/services/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaInfosResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_quota_infos(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/services/*}/quotaInfos" + % client.transport._host, + args[1], + ) + + +def test_list_quota_infos_rest_flattened_error(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_infos( + cloudquotas.ListQuotaInfosRequest(), + parent="parent_value", + ) + + +def test_list_quota_infos_rest_pager(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[], + next_page_token="def", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaInfosResponse( + quota_infos=[ + resources.QuotaInfo(), + resources.QuotaInfo(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloudquotas.ListQuotaInfosResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/services/sample3" + } + + pager = client.list_quota_infos(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaInfo) for i in results) + + pages = list(client.list_quota_infos(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.GetQuotaInfoRequest, + dict, + ], +) +def test_get_quota_info_rest(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/services/sample3/quotaInfos/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaInfo( + name="name_value", + quota_id="quota_id_value", + metric="metric_value", + service="service_value", + is_precise=True, + refresh_interval="refresh_interval_value", + container_type=resources.QuotaInfo.ContainerType.PROJECT, + dimensions=["dimensions_value"], + metric_display_name="metric_display_name_value", + quota_display_name="quota_display_name_value", + metric_unit="metric_unit_value", + is_fixed=True, + is_concurrent=True, + service_request_quota_uri="service_request_quota_uri_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_quota_info(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaInfo) + assert response.name == "name_value" + assert response.quota_id == "quota_id_value" + assert response.metric == "metric_value" + assert response.service == "service_value" + assert response.is_precise is True + assert response.refresh_interval == "refresh_interval_value" + assert response.container_type == resources.QuotaInfo.ContainerType.PROJECT + assert response.dimensions == ["dimensions_value"] + assert response.metric_display_name == "metric_display_name_value" + assert response.quota_display_name == "quota_display_name_value" + assert response.metric_unit == "metric_unit_value" + assert response.is_fixed is True + assert response.is_concurrent is True + assert response.service_request_quota_uri == "service_request_quota_uri_value" + + +def test_get_quota_info_rest_required_fields( + request_type=cloudquotas.GetQuotaInfoRequest, +): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_quota_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_quota_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudQuotasClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.QuotaInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_quota_info(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_quota_info_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_quota_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_quota_info_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudQuotasRestInterceptor, "post_get_quota_info" + ) as post, mock.patch.object( + transports.CloudQuotasRestInterceptor, "pre_get_quota_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.GetQuotaInfoRequest.pb( + cloudquotas.GetQuotaInfoRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.QuotaInfo.to_json(resources.QuotaInfo()) + + request = cloudquotas.GetQuotaInfoRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaInfo() + + client.get_quota_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_quota_info_rest_bad_request( + transport: str = "rest", request_type=cloudquotas.GetQuotaInfoRequest +): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/services/sample3/quotaInfos/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_quota_info(request) + + +def test_get_quota_info_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaInfo() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/services/sample3/quotaInfos/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_quota_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/services/*/quotaInfos/*}" + % client.transport._host, + args[1], + ) + + +def test_get_quota_info_rest_flattened_error(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_quota_info( + cloudquotas.GetQuotaInfoRequest(), + name="name_value", + ) + + +def test_get_quota_info_rest_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.ListQuotaPreferencesRequest, + dict, + ], +) +def test_list_quota_preferences_rest(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaPreferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaPreferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_quota_preferences(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQuotaPreferencesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_quota_preferences_rest_required_fields( + request_type=cloudquotas.ListQuotaPreferencesRequest, +): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_quota_preferences._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_quota_preferences._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudquotas.ListQuotaPreferencesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaPreferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_quota_preferences(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_quota_preferences_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_quota_preferences._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_quota_preferences_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudQuotasRestInterceptor, "post_list_quota_preferences" + ) as post, mock.patch.object( + transports.CloudQuotasRestInterceptor, "pre_list_quota_preferences" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.ListQuotaPreferencesRequest.pb( + 
cloudquotas.ListQuotaPreferencesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudquotas.ListQuotaPreferencesResponse.to_json( + cloudquotas.ListQuotaPreferencesResponse() + ) + + request = cloudquotas.ListQuotaPreferencesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudquotas.ListQuotaPreferencesResponse() + + client.list_quota_preferences( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_quota_preferences_rest_bad_request( + transport: str = "rest", request_type=cloudquotas.ListQuotaPreferencesRequest +): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_quota_preferences(request) + + +def test_list_quota_preferences_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudquotas.ListQuotaPreferencesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudquotas.ListQuotaPreferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_quota_preferences(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/quotaPreferences" + % client.transport._host, + args[1], + ) + + +def test_list_quota_preferences_rest_flattened_error(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_quota_preferences( + cloudquotas.ListQuotaPreferencesRequest(), + parent="parent_value", + ) + + +def test_list_quota_preferences_rest_pager(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + next_page_token="abc", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[], + next_page_token="def", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + ], + next_page_token="ghi", + ), + cloudquotas.ListQuotaPreferencesResponse( + quota_preferences=[ + resources.QuotaPreference(), + resources.QuotaPreference(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloudquotas.ListQuotaPreferencesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_quota_preferences(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.QuotaPreference) for i in results) + + pages = list(client.list_quota_preferences(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.GetQuotaPreferenceRequest, + dict, + ], +) +def test_get_quota_preference_rest(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3" + } 
+ request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_quota_preference(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +def test_get_quota_preference_rest_required_fields( + request_type=cloudquotas.GetQuotaPreferenceRequest, +): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_quota_preference(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_quota_preference_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_quota_preference._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_quota_preference_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudQuotasRestInterceptor, "post_get_quota_preference" + ) as post, mock.patch.object( + transports.CloudQuotasRestInterceptor, "pre_get_quota_preference" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.GetQuotaPreferenceRequest.pb( + cloudquotas.GetQuotaPreferenceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.QuotaPreference.to_json( + resources.QuotaPreference() + ) + + request = cloudquotas.GetQuotaPreferenceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaPreference() + + client.get_quota_preference( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_quota_preference_rest_bad_request( + transport: str = "rest", request_type=cloudquotas.GetQuotaPreferenceRequest +): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_quota_preference(request) + + +def test_get_quota_preference_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaPreference() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_quota_preference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/quotaPreferences/*}" + % client.transport._host, + args[1], + ) + + +def test_get_quota_preference_rest_flattened_error(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_quota_preference( + cloudquotas.GetQuotaPreferenceRequest(), + name="name_value", + ) + + +def test_get_quota_preference_rest_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.CreateQuotaPreferenceRequest, + dict, + ], +) +def test_create_quota_preference_rest(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["quota_preference"] = { + "name": "name_value", + "dimensions": {}, + "quota_config": { + "preferred_value": 1595, + "state_detail": "state_detail_value", + "granted_value": {"value": 541}, + "trace_id": "trace_id_value", + "annotations": {}, + "request_origin": 1, + }, + "etag": "etag_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "service": "service_value", + "quota_id": "quota_id_value", + "reconciling": True, + "justification": "justification_value", + "contact_email": "contact_email_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloudquotas.CreateQuotaPreferenceRequest.meta.fields[ + "quota_preference" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["quota_preference"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["quota_preference"][field])): + del request_init["quota_preference"][field][i][subfield] + 
else: + del request_init["quota_preference"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_quota_preference(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +def test_create_quota_preference_rest_required_fields( + request_type=cloudquotas.CreateQuotaPreferenceRequest, +): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_quota_preference._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "ignore_safety_checks", + "quota_preference_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = resources.QuotaPreference() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_quota_preference(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_quota_preference_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_quota_preference._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "ignoreSafetyChecks", + "quotaPreferenceId", + ) + ) + & set( + ( + "parent", + "quotaPreference", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_quota_preference_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudQuotasRestInterceptor, "post_create_quota_preference" + ) as post, mock.patch.object( + transports.CloudQuotasRestInterceptor, "pre_create_quota_preference" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.CreateQuotaPreferenceRequest.pb( + cloudquotas.CreateQuotaPreferenceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.QuotaPreference.to_json( + resources.QuotaPreference() + ) + + request = cloudquotas.CreateQuotaPreferenceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaPreference() + + client.create_quota_preference( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_quota_preference_rest_bad_request( + transport: str = "rest", request_type=cloudquotas.CreateQuotaPreferenceRequest +): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_quota_preference(request) + + +def test_create_quota_preference_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + quota_preference=resources.QuotaPreference(name="name_value"), + quota_preference_id="quota_preference_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_quota_preference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/quotaPreferences" + % client.transport._host, + args[1], + ) + + +def test_create_quota_preference_rest_flattened_error(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_quota_preference( + cloudquotas.CreateQuotaPreferenceRequest(), + parent="parent_value", + quota_preference=resources.QuotaPreference(name="name_value"), + quota_preference_id="quota_preference_id_value", + ) + + +def test_create_quota_preference_rest_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudquotas.UpdateQuotaPreferenceRequest, + dict, + ], +) +def test_update_quota_preference_rest(request_type): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "quota_preference": { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3" + } + } + request_init["quota_preference"] = { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3", + "dimensions": {}, + "quota_config": { + "preferred_value": 1595, + "state_detail": "state_detail_value", + "granted_value": {"value": 541}, + "trace_id": "trace_id_value", + "annotations": {}, + "request_origin": 1, + }, + "etag": "etag_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "service": "service_value", + "quota_id": "quota_id_value", + "reconciling": True, + "justification": "justification_value", + "contact_email": "contact_email_value", + } + # The version of a generated 
dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloudquotas.UpdateQuotaPreferenceRequest.meta.fields[ + "quota_preference" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["quota_preference"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + 
# Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["quota_preference"][field])): + del request_init["quota_preference"][field][i][subfield] + else: + del request_init["quota_preference"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference( + name="name_value", + etag="etag_value", + service="service_value", + quota_id="quota_id_value", + reconciling=True, + justification="justification_value", + contact_email="contact_email_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_quota_preference(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.QuotaPreference) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.service == "service_value" + assert response.quota_id == "quota_id_value" + assert response.reconciling is True + assert response.justification == "justification_value" + assert response.contact_email == "contact_email_value" + + +def test_update_quota_preference_rest_required_fields( + request_type=cloudquotas.UpdateQuotaPreferenceRequest, +): + transport_class = transports.CloudQuotasRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_quota_preference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_quota_preference._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "ignore_safety_checks", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_quota_preference(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_quota_preference_rest_unset_required_fields(): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_quota_preference._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "ignoreSafetyChecks", + "updateMask", + "validateOnly", + ) + ) + & set(("quotaPreference",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_quota_preference_rest_interceptors(null_interceptor): + transport = transports.CloudQuotasRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudQuotasRestInterceptor(), + ) + client = CloudQuotasClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudQuotasRestInterceptor, "post_update_quota_preference" + ) as post, mock.patch.object( + transports.CloudQuotasRestInterceptor, "pre_update_quota_preference" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudquotas.UpdateQuotaPreferenceRequest.pb( + cloudquotas.UpdateQuotaPreferenceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.QuotaPreference.to_json( + resources.QuotaPreference() + ) + + request = cloudquotas.UpdateQuotaPreferenceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.QuotaPreference() + + client.update_quota_preference( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_quota_preference_rest_bad_request( + transport: str = "rest", request_type=cloudquotas.UpdateQuotaPreferenceRequest +): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "quota_preference": { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_quota_preference(request) + + +def test_update_quota_preference_rest_flattened(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.QuotaPreference() + + # get arguments that satisfy an http rule for this method + sample_request = { + "quota_preference": { + "name": "projects/sample1/locations/sample2/quotaPreferences/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + quota_preference=resources.QuotaPreference(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.QuotaPreference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_quota_preference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{quota_preference.name=projects/*/locations/*/quotaPreferences/*}" + % client.transport._host, + args[1], + ) + + +def test_update_quota_preference_rest_flattened_error(transport: str = "rest"): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_quota_preference( + cloudquotas.UpdateQuotaPreferenceRequest(), + quota_preference=resources.QuotaPreference(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_quota_preference_rest_error(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudQuotasClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudQuotasClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudQuotasGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudQuotasGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasGrpcAsyncIOTransport, + transports.CloudQuotasRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudQuotasClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudQuotasGrpcTransport, + ) + + +def test_cloud_quotas_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudQuotasTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_quotas_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudQuotasTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_quota_infos", + "get_quota_info", + "list_quota_preferences", + "get_quota_preference", + "create_quota_preference", + "update_quota_preference", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_quotas_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudQuotasTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_quotas_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.cloudquotas_v1.services.cloud_quotas.transports.CloudQuotasTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudQuotasTransport() + adc.assert_called_once() + + +def test_cloud_quotas_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudQuotasClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasGrpcAsyncIOTransport, + ], +) +def test_cloud_quotas_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudQuotasGrpcTransport, + transports.CloudQuotasGrpcAsyncIOTransport, + transports.CloudQuotasRestTransport, + ], +) +def test_cloud_quotas_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudQuotasGrpcTransport, grpc_helpers), + (transports.CloudQuotasGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def 
test_cloud_quotas_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudquotas.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudquotas.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CloudQuotasGrpcTransport, transports.CloudQuotasGrpcAsyncIOTransport], +) +def test_cloud_quotas_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cloud_quotas_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudQuotasRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_quotas_host_no_port(transport_name): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudquotas.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudquotas.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudquotas.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_quotas_host_with_port(transport_name): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudquotas.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudquotas.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudquotas.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + 
"transport_name", + [ + "rest", + ], +) +def test_cloud_quotas_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudQuotasClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudQuotasClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_quota_infos._session + session2 = client2.transport.list_quota_infos._session + assert session1 != session2 + session1 = client1.transport.get_quota_info._session + session2 = client2.transport.get_quota_info._session + assert session1 != session2 + session1 = client1.transport.list_quota_preferences._session + session2 = client2.transport.list_quota_preferences._session + assert session1 != session2 + session1 = client1.transport.get_quota_preference._session + session2 = client2.transport.get_quota_preference._session + assert session1 != session2 + session1 = client1.transport.create_quota_preference._session + session2 = client2.transport.create_quota_preference._session + assert session1 != session2 + session1 = client1.transport.update_quota_preference._session + session2 = client2.transport.update_quota_preference._session + assert session1 != session2 + + +def test_cloud_quotas_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudQuotasGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_quotas_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudQuotasGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CloudQuotasGrpcTransport, transports.CloudQuotasGrpcAsyncIOTransport], +) +def test_cloud_quotas_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CloudQuotasGrpcTransport, transports.CloudQuotasGrpcAsyncIOTransport], +) +def test_cloud_quotas_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_quota_info_path(): + project = "squid" + location = "clam" + service = "whelk" + quota_info = "octopus" + expected = "projects/{project}/locations/{location}/services/{service}/quotaInfos/{quota_info}".format( + project=project, + location=location, + service=service, + quota_info=quota_info, + ) + actual = CloudQuotasClient.quota_info_path(project, location, service, quota_info) + assert expected == actual + + +def test_parse_quota_info_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "service": "cuttlefish", + "quota_info": "mussel", + } + path = CloudQuotasClient.quota_info_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudQuotasClient.parse_quota_info_path(path) + assert expected == actual + + +def test_quota_preference_path(): + project = "winkle" + location = "nautilus" + quota_preference = "scallop" + expected = "projects/{project}/locations/{location}/quotaPreferences/{quota_preference}".format( + project=project, + location=location, + quota_preference=quota_preference, + ) + actual = CloudQuotasClient.quota_preference_path( + project, location, quota_preference + ) + assert expected == actual + + +def test_parse_quota_preference_path(): + expected = { + "project": "abalone", + "location": "squid", + "quota_preference": "clam", + } + path = CloudQuotasClient.quota_preference_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_quota_preference_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudQuotasClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudQuotasClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudQuotasClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudQuotasClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudQuotasClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudQuotasClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudQuotasClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = CloudQuotasClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudQuotasClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudQuotasClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CloudQuotasClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudQuotasClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudQuotasClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudQuotasTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudQuotasTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudQuotasClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudQuotasAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CloudQuotasClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudQuotasClient, transports.CloudQuotasGrpcTransport), + (CloudQuotasAsyncClient, transports.CloudQuotasGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-config/CHANGELOG.md b/packages/google-cloud-config/CHANGELOG.md index 42b2d301c9dd..88d8b9a44ab8 100644 --- a/packages/google-cloud-config/CHANGELOG.md +++ b/packages/google-cloud-config/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.2...google-cloud-config-v0.1.3) (2024-01-19) + + +### Features + +* [google-cloud-config] added Terraform Plan ([#12197](https://github.com/googleapis/google-cloud-python/issues/12197)) ([2de325b](https://github.com/googleapis/google-cloud-python/commit/2de325ba6aef85c98c9ebbe03fc6a4ebb2834a12)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.1...google-cloud-config-v0.1.2) (2023-12-07) diff --git a/packages/google-cloud-config/google/cloud/config/__init__.py 
b/packages/google-cloud-config/google/cloud/config/__init__.py index b86f5ddc9ec0..ebea67cebc62 100644 --- a/packages/google-cloud-config/google/cloud/config/__init__.py +++ b/packages/google-cloud-config/google/cloud/config/__init__.py @@ -23,20 +23,27 @@ from google.cloud.config_v1.types.config import ( ApplyResults, CreateDeploymentRequest, + CreatePreviewRequest, DeleteDeploymentRequest, + DeletePreviewRequest, DeleteStatefileRequest, Deployment, DeploymentOperationMetadata, ExportDeploymentStatefileRequest, ExportLockInfoRequest, + ExportPreviewResultRequest, + ExportPreviewResultResponse, ExportRevisionStatefileRequest, GetDeploymentRequest, + GetPreviewRequest, GetResourceRequest, GetRevisionRequest, GitSource, ImportStatefileRequest, ListDeploymentsRequest, ListDeploymentsResponse, + ListPreviewsRequest, + ListPreviewsResponse, ListResourcesRequest, ListResourcesResponse, ListRevisionsRequest, @@ -44,6 +51,10 @@ LockDeploymentRequest, LockInfo, OperationMetadata, + Preview, + PreviewArtifacts, + PreviewOperationMetadata, + PreviewResult, Resource, ResourceCAIInfo, ResourceTerraformInfo, @@ -62,20 +73,27 @@ "ConfigAsyncClient", "ApplyResults", "CreateDeploymentRequest", + "CreatePreviewRequest", "DeleteDeploymentRequest", + "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", "DeploymentOperationMetadata", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", "GetDeploymentRequest", + "GetPreviewRequest", "GetResourceRequest", "GetRevisionRequest", "GitSource", "ImportStatefileRequest", "ListDeploymentsRequest", "ListDeploymentsResponse", + "ListPreviewsRequest", + "ListPreviewsResponse", "ListResourcesRequest", "ListResourcesResponse", "ListRevisionsRequest", @@ -83,6 +101,10 @@ "LockDeploymentRequest", "LockInfo", "OperationMetadata", + "Preview", + "PreviewArtifacts", + "PreviewOperationMetadata", + "PreviewResult", "Resource", 
"ResourceCAIInfo", "ResourceTerraformInfo", diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py index cf99f3acb1ee..536d6648a6f0 100644 --- a/packages/google-cloud-config/google/cloud/config/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/__init__.py index a407a3aa0ff8..0dd38e23e39f 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/__init__.py @@ -22,20 +22,27 @@ from .types.config import ( ApplyResults, CreateDeploymentRequest, + CreatePreviewRequest, DeleteDeploymentRequest, + DeletePreviewRequest, DeleteStatefileRequest, Deployment, DeploymentOperationMetadata, ExportDeploymentStatefileRequest, ExportLockInfoRequest, + ExportPreviewResultRequest, + ExportPreviewResultResponse, ExportRevisionStatefileRequest, GetDeploymentRequest, + GetPreviewRequest, GetResourceRequest, GetRevisionRequest, GitSource, ImportStatefileRequest, ListDeploymentsRequest, ListDeploymentsResponse, + ListPreviewsRequest, + ListPreviewsResponse, ListResourcesRequest, ListResourcesResponse, ListRevisionsRequest, @@ -43,6 +50,10 @@ LockDeploymentRequest, LockInfo, OperationMetadata, + Preview, + PreviewArtifacts, + PreviewOperationMetadata, + PreviewResult, Resource, ResourceCAIInfo, ResourceTerraformInfo, @@ -61,20 +72,27 @@ "ApplyResults", "ConfigClient", "CreateDeploymentRequest", + "CreatePreviewRequest", "DeleteDeploymentRequest", + "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", 
"DeploymentOperationMetadata", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", "GetDeploymentRequest", + "GetPreviewRequest", "GetResourceRequest", "GetRevisionRequest", "GitSource", "ImportStatefileRequest", "ListDeploymentsRequest", "ListDeploymentsResponse", + "ListPreviewsRequest", + "ListPreviewsResponse", "ListResourcesRequest", "ListResourcesResponse", "ListRevisionsRequest", @@ -82,6 +100,10 @@ "LockDeploymentRequest", "LockInfo", "OperationMetadata", + "Preview", + "PreviewArtifacts", + "PreviewOperationMetadata", + "PreviewResult", "Resource", "ResourceCAIInfo", "ResourceTerraformInfo", diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json index 65de7b78a0aa..190bb0825b1c 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json @@ -15,11 +15,21 @@ "create_deployment" ] }, + "CreatePreview": { + "methods": [ + "create_preview" + ] + }, "DeleteDeployment": { "methods": [ "delete_deployment" ] }, + "DeletePreview": { + "methods": [ + "delete_preview" + ] + }, "DeleteStatefile": { "methods": [ "delete_statefile" @@ -35,6 +45,11 @@ "export_lock_info" ] }, + "ExportPreviewResult": { + "methods": [ + "export_preview_result" + ] + }, "ExportRevisionStatefile": { "methods": [ "export_revision_statefile" @@ -45,6 +60,11 @@ "get_deployment" ] }, + "GetPreview": { + "methods": [ + "get_preview" + ] + }, "GetResource": { "methods": [ "get_resource" @@ -65,6 +85,11 @@ "list_deployments" ] }, + "ListPreviews": { + "methods": [ + "list_previews" + ] + }, "ListResources": { "methods": [ "list_resources" @@ -100,11 +125,21 @@ "create_deployment" ] }, + "CreatePreview": { + "methods": [ + "create_preview" + ] + }, "DeleteDeployment": { "methods": [ 
"delete_deployment" ] }, + "DeletePreview": { + "methods": [ + "delete_preview" + ] + }, "DeleteStatefile": { "methods": [ "delete_statefile" @@ -120,6 +155,11 @@ "export_lock_info" ] }, + "ExportPreviewResult": { + "methods": [ + "export_preview_result" + ] + }, "ExportRevisionStatefile": { "methods": [ "export_revision_statefile" @@ -130,6 +170,11 @@ "get_deployment" ] }, + "GetPreview": { + "methods": [ + "get_preview" + ] + }, "GetResource": { "methods": [ "get_resource" @@ -150,6 +195,11 @@ "list_deployments" ] }, + "ListPreviews": { + "methods": [ + "list_previews" + ] + }, "ListResources": { "methods": [ "list_resources" @@ -185,11 +235,21 @@ "create_deployment" ] }, + "CreatePreview": { + "methods": [ + "create_preview" + ] + }, "DeleteDeployment": { "methods": [ "delete_deployment" ] }, + "DeletePreview": { + "methods": [ + "delete_preview" + ] + }, "DeleteStatefile": { "methods": [ "delete_statefile" @@ -205,6 +265,11 @@ "export_lock_info" ] }, + "ExportPreviewResult": { + "methods": [ + "export_preview_result" + ] + }, "ExportRevisionStatefile": { "methods": [ "export_revision_statefile" @@ -215,6 +280,11 @@ "get_deployment" ] }, + "GetPreview": { + "methods": [ + "get_preview" + ] + }, "GetResource": { "methods": [ "get_resource" @@ -235,6 +305,11 @@ "list_deployments" ] }, + "ListPreviews": { + "methods": [ + "list_previews" + ] + }, "ListResources": { "methods": [ "list_resources" diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py index cf99f3acb1ee..536d6648a6f0 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py index 8f6ab9857596..e6752efa8a85 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -50,6 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.cloud.config_v1.services.config import pagers from google.cloud.config_v1.types import config @@ -72,6 +73,8 @@ class ConfigAsyncClient: deployment_path = staticmethod(ConfigClient.deployment_path) parse_deployment_path = staticmethod(ConfigClient.parse_deployment_path) + preview_path = staticmethod(ConfigClient.preview_path) + parse_preview_path = staticmethod(ConfigClient.parse_preview_path) resource_path = staticmethod(ConfigClient.resource_path) parse_resource_path = staticmethod(ConfigClient.parse_resource_path) revision_path = staticmethod(ConfigClient.revision_path) @@ -1997,6 +2000,553 @@ async def sample_export_lock_info(): # Done; return the response. return response + async def create_preview( + self, + request: Optional[Union[config.CreatePreviewRequest, dict]] = None, + *, + parent: Optional[str] = None, + preview: Optional[config.Preview] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Preview][google.cloud.config.v1.Preview]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_create_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.CreatePreviewRequest, dict]]): + The request object. A request to create a preview. + parent (:class:`str`): + Required. The parent in whose context the Preview is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + preview (:class:`google.cloud.config_v1.types.Preview`): + Required. [Preview][google.cloud.config.v1.Preview] + resource to be created. + + This corresponds to the ``preview`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, preview]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.CreatePreviewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if preview is not None: + request.preview = preview + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_preview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_preview( + self, + request: Optional[Union[config.GetPreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Preview: + r"""Gets details about a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = await client.get_preview(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetPreviewRequest, dict]]): + The request object. A request to get details about a + preview. + name (:class:`str`): + Required. The name of the preview. Format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.config_v1.types.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.GetPreviewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_preview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_previews( + self, + request: Optional[Union[config.ListPreviewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPreviewsAsyncPager: + r"""Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_previews(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListPreviewsRequest, dict]]): + The request object. A request to list all previews for a + given project and location. + parent (:class:`str`): + Required. The parent in whose context the Previews are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListPreviewsAsyncPager: + A response to a ListPreviews call. Contains a list of + Previews. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.ListPreviewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_previews, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPreviewsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_preview( + self, + request: Optional[Union[config.DeletePreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_delete_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.DeletePreviewRequest, dict]]): + The request object. A request to delete a preview. + name (:class:`str`): + Required. The name of the Preview in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.DeletePreviewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_preview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def export_preview_result( + self, + request: Optional[Union[config.ExportPreviewResultRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ExportPreviewResultResponse: + r"""Export [Preview][google.cloud.config.v1.Preview] results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_preview_result(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ExportPreviewResultRequest, dict]]): + The request object. A request to export preview results. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.ExportPreviewResultResponse: + A response to ExportPreviewResult call. Contains preview + results. + + """ + # Create or coerce a protobuf request object. + request = config.ExportPreviewResultRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_preview_result, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index d3ff68354028..354b94abeb93 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -54,6 +54,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.cloud.config_v1.services.config import pagers from google.cloud.config_v1.types import config @@ -211,6 +212,28 @@ def parse_deployment_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def preview_path( + project: str, + location: str, + preview: str, + ) -> str: + """Returns a fully-qualified preview string.""" + return "projects/{project}/locations/{location}/previews/{preview}".format( + project=project, + location=location, + preview=preview, + ) + + @staticmethod + def parse_preview_path(path: str) -> Dict[str, str]: + """Parses a preview path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/previews/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def resource_path( project: str, @@ -2328,6 +2351,554 @@ def sample_export_lock_info(): # Done; return the response. 
return response + def create_preview( + self, + request: Optional[Union[config.CreatePreviewRequest, dict]] = None, + *, + parent: Optional[str] = None, + preview: Optional[config.Preview] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_create_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.CreatePreviewRequest, dict]): + The request object. A request to create a preview. + parent (str): + Required. The parent in whose context the Preview is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + preview (google.cloud.config_v1.types.Preview): + Required. [Preview][google.cloud.config.v1.Preview] + resource to be created. 
+ + This corresponds to the ``preview`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, preview]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.CreatePreviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.CreatePreviewRequest): + request = config.CreatePreviewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if preview is not None: + request.preview = preview + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_preview] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_preview( + self, + request: Optional[Union[config.GetPreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Preview: + r"""Gets details about a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = client.get_preview(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetPreviewRequest, dict]): + The request object. A request to get details about a + preview. + name (str): + Required. The name of the preview. Format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.GetPreviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.GetPreviewRequest): + request = config.GetPreviewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_preview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_previews( + self, + request: Optional[Union[config.ListPreviewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPreviewsPager: + r"""Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_previews(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListPreviewsRequest, dict]): + The request object. A request to list all previews for a + given project and location. + parent (str): + Required. The parent in whose context the Previews are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.config_v1.services.config.pagers.ListPreviewsPager: + A response to a ListPreviews call. Contains a list of + Previews. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ListPreviewsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ListPreviewsRequest): + request = config.ListPreviewsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_previews] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPreviewsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_preview( + self, + request: Optional[Union[config.DeletePreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_delete_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.DeletePreviewRequest, dict]): + The request object. A request to delete a preview. + name (str): + Required. The name of the Preview in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.DeletePreviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.DeletePreviewRequest): + request = config.DeletePreviewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_preview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def export_preview_result( + self, + request: Optional[Union[config.ExportPreviewResultRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ExportPreviewResultResponse: + r"""Export [Preview][google.cloud.config.v1.Preview] results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_preview_result(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ExportPreviewResultRequest, dict]): + The request object. A request to export preview results. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.ExportPreviewResultResponse: + A response to ExportPreviewResult call. Contains preview + results. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a config.ExportPreviewResultRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ExportPreviewResultRequest): + request = config.ExportPreviewResultRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_preview_result] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "ConfigClient": return self diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py index d0ccc958ea38..8f74585f587d 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py @@ -409,3 +409,131 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPreviewsPager: + """A pager for iterating through ``list_previews`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListPreviewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``previews`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPreviews`` requests and continue to iterate + through the ``previews`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListPreviewsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., config.ListPreviewsResponse], + request: config.ListPreviewsRequest, + response: config.ListPreviewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListPreviewsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListPreviewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListPreviewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListPreviewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[config.Preview]: + for page in self.pages: + yield from page.previews + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPreviewsAsyncPager: + """A pager for iterating through ``list_previews`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListPreviewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``previews`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPreviews`` requests and continue to iterate + through the ``previews`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.config_v1.types.ListPreviewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListPreviewsResponse]], + request: config.ListPreviewsRequest, + response: config.ListPreviewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListPreviewsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListPreviewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListPreviewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListPreviewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.Preview]: + async def async_generator(): + async for page in self.pages: + for response in page.previews: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py index a5139f21b4ce..5ca64f211d1a 100644 --- 
a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py @@ -207,6 +207,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_preview: gapic_v1.method.wrap_method( + self.create_preview, + default_timeout=None, + client_info=client_info, + ), + self.get_preview: gapic_v1.method.wrap_method( + self.get_preview, + default_timeout=None, + client_info=client_info, + ), + self.list_previews: gapic_v1.method.wrap_method( + self.list_previews, + default_timeout=None, + client_info=client_info, + ), + self.delete_preview: gapic_v1.method.wrap_method( + self.delete_preview, + default_timeout=None, + client_info=client_info, + ), + self.export_preview_result: gapic_v1.method.wrap_method( + self.export_preview_result, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -367,6 +392,53 @@ def export_lock_info( ]: raise NotImplementedError() + @property + def create_preview( + self, + ) -> Callable[ + [config.CreatePreviewRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_preview( + self, + ) -> Callable[ + [config.GetPreviewRequest], Union[config.Preview, Awaitable[config.Preview]] + ]: + raise NotImplementedError() + + @property + def list_previews( + self, + ) -> Callable[ + [config.ListPreviewsRequest], + Union[config.ListPreviewsResponse, Awaitable[config.ListPreviewsResponse]], + ]: + raise NotImplementedError() + + @property + def delete_preview( + self, + ) -> Callable[ + [config.DeletePreviewRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], + Union[ + config.ExportPreviewResultResponse, + 
Awaitable[config.ExportPreviewResultResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py index 16b83b8ff3fa..da503d656520 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py @@ -670,6 +670,137 @@ def export_lock_info( ) return self._stubs["export_lock_info"] + @property + def create_preview( + self, + ) -> Callable[[config.CreatePreviewRequest], operations_pb2.Operation]: + r"""Return a callable for the create preview method over gRPC. + + Creates a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.CreatePreviewRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_preview" not in self._stubs: + self._stubs["create_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/CreatePreview", + request_serializer=config.CreatePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_preview"] + + @property + def get_preview(self) -> Callable[[config.GetPreviewRequest], config.Preview]: + r"""Return a callable for the get preview method over gRPC. + + Gets details about a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.GetPreviewRequest], + ~.Preview]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_preview" not in self._stubs: + self._stubs["get_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetPreview", + request_serializer=config.GetPreviewRequest.serialize, + response_deserializer=config.Preview.deserialize, + ) + return self._stubs["get_preview"] + + @property + def list_previews( + self, + ) -> Callable[[config.ListPreviewsRequest], config.ListPreviewsResponse]: + r"""Return a callable for the list previews method over gRPC. + + Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + Returns: + Callable[[~.ListPreviewsRequest], + ~.ListPreviewsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_previews" not in self._stubs: + self._stubs["list_previews"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListPreviews", + request_serializer=config.ListPreviewsRequest.serialize, + response_deserializer=config.ListPreviewsResponse.deserialize, + ) + return self._stubs["list_previews"] + + @property + def delete_preview( + self, + ) -> Callable[[config.DeletePreviewRequest], operations_pb2.Operation]: + r"""Return a callable for the delete preview method over gRPC. + + Deletes a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.DeletePreviewRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_preview" not in self._stubs: + self._stubs["delete_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeletePreview", + request_serializer=config.DeletePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_preview"] + + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], config.ExportPreviewResultResponse + ]: + r"""Return a callable for the export preview result method over gRPC. + + Export [Preview][google.cloud.config.v1.Preview] results. + + Returns: + Callable[[~.ExportPreviewResultRequest], + ~.ExportPreviewResultResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_preview_result" not in self._stubs: + self._stubs["export_preview_result"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportPreviewResult", + request_serializer=config.ExportPreviewResultRequest.serialize, + response_deserializer=config.ExportPreviewResultResponse.deserialize, + ) + return self._stubs["export_preview_result"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py index 84eaa9ba992c..c02df0a05a20 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py @@ -695,6 +695,140 @@ def export_lock_info( ) return self._stubs["export_lock_info"] + @property + def create_preview( + self, + ) -> Callable[[config.CreatePreviewRequest], 
Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create preview method over gRPC. + + Creates a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.CreatePreviewRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_preview" not in self._stubs: + self._stubs["create_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/CreatePreview", + request_serializer=config.CreatePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_preview"] + + @property + def get_preview( + self, + ) -> Callable[[config.GetPreviewRequest], Awaitable[config.Preview]]: + r"""Return a callable for the get preview method over gRPC. + + Gets details about a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.GetPreviewRequest], + Awaitable[~.Preview]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_preview" not in self._stubs: + self._stubs["get_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetPreview", + request_serializer=config.GetPreviewRequest.serialize, + response_deserializer=config.Preview.deserialize, + ) + return self._stubs["get_preview"] + + @property + def list_previews( + self, + ) -> Callable[[config.ListPreviewsRequest], Awaitable[config.ListPreviewsResponse]]: + r"""Return a callable for the list previews method over gRPC. 
+ + Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + Returns: + Callable[[~.ListPreviewsRequest], + Awaitable[~.ListPreviewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_previews" not in self._stubs: + self._stubs["list_previews"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListPreviews", + request_serializer=config.ListPreviewsRequest.serialize, + response_deserializer=config.ListPreviewsResponse.deserialize, + ) + return self._stubs["list_previews"] + + @property + def delete_preview( + self, + ) -> Callable[[config.DeletePreviewRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete preview method over gRPC. + + Deletes a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.DeletePreviewRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_preview" not in self._stubs: + self._stubs["delete_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeletePreview", + request_serializer=config.DeletePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_preview"] + + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], + Awaitable[config.ExportPreviewResultResponse], + ]: + r"""Return a callable for the export preview result method over gRPC. + + Export [Preview][google.cloud.config.v1.Preview] results. 
+ + Returns: + Callable[[~.ExportPreviewResultRequest], + Awaitable[~.ExportPreviewResultResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_preview_result" not in self._stubs: + self._stubs["export_preview_result"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportPreviewResult", + request_serializer=config.ExportPreviewResultRequest.serialize, + response_deserializer=config.ExportPreviewResultResponse.deserialize, + ) + return self._stubs["export_preview_result"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py index 830a3d73360d..321e415304ea 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py @@ -83,6 +83,14 @@ def post_create_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_create_preview(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_preview(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_deployment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -91,6 +99,14 @@ def post_delete_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_preview(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_preview(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_delete_statefile(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -111,6 +127,14 @@ def post_export_lock_info(self, response): logging.log(f"Received response: {response}") return response + def pre_export_preview_result(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_preview_result(self, response): + logging.log(f"Received response: {response}") + return response + def pre_export_revision_statefile(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +151,14 @@ def post_get_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_get_preview(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_preview(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_resource(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -159,6 +191,14 @@ def post_list_deployments(self, response): logging.log(f"Received response: {response}") return response + def pre_list_previews(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_previews(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_resources(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -228,6 +268,27 @@ def post_create_deployment( """ return response + def pre_create_preview( + self, request: config.CreatePreviewRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.CreatePreviewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_preview + + Override in a subclass to manipulate the request or metadata + 
before they are sent to the Config server. + """ + return request, metadata + + def post_create_preview( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_preview + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_delete_deployment( self, request: config.DeleteDeploymentRequest, @@ -251,6 +312,27 @@ def post_delete_deployment( """ return response + def pre_delete_preview( + self, request: config.DeletePreviewRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.DeletePreviewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_preview + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_delete_preview( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_preview + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_delete_statefile( self, request: config.DeleteStatefileRequest, @@ -305,6 +387,29 @@ def post_export_lock_info(self, response: config.LockInfo) -> config.LockInfo: """ return response + def pre_export_preview_result( + self, + request: config.ExportPreviewResultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.ExportPreviewResultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_preview_result + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_export_preview_result( + self, response: config.ExportPreviewResultResponse + ) -> config.ExportPreviewResultResponse: + """Post-rpc interceptor for export_preview_result + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_export_revision_statefile( self, request: config.ExportRevisionStatefileRequest, @@ -347,6 +452,25 @@ def post_get_deployment(self, response: config.Deployment) -> config.Deployment: """ return response + def pre_get_preview( + self, request: config.GetPreviewRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.GetPreviewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_preview + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_preview(self, response: config.Preview) -> config.Preview: + """Post-rpc interceptor for get_preview + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_get_resource( self, request: config.GetResourceRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[config.GetResourceRequest, Sequence[Tuple[str, str]]]: @@ -429,6 +553,27 @@ def post_list_deployments( """ return response + def pre_list_previews( + self, request: config.ListPreviewsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.ListPreviewsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_previews + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_list_previews( + self, response: config.ListPreviewsResponse + ) -> config.ListPreviewsResponse: + """Post-rpc interceptor for list_previews + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_list_resources( self, request: config.ListResourcesRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[config.ListResourcesRequest, Sequence[Tuple[str, str]]]: @@ -990,6 +1135,102 @@ def __call__( resp = self._interceptor.post_create_deployment(resp) return resp + class _CreatePreview(ConfigRestStub): + def __hash__(self): + return hash("CreatePreview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.CreatePreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create preview method over HTTP. + + Args: + request (~.config.CreatePreviewRequest): + The request object. A request to create a preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/previews", + "body": "preview", + }, + ] + request, metadata = self._interceptor.pre_create_preview(request, metadata) + pb_request = config.CreatePreviewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_preview(resp) + return resp + class _DeleteDeployment(ConfigRestStub): def __hash__(self): return hash("DeleteDeployment") @@ -1037,10 +1278,97 @@ def __call__( "uri": "/v1/{name=projects/*/locations/*/deployments/*}", }, ] - request, metadata = self._interceptor.pre_delete_deployment( - request, metadata - ) - pb_request = config.DeleteDeploymentRequest.pb(request) + request, metadata = self._interceptor.pre_delete_deployment( + request, metadata + ) + pb_request = config.DeleteDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_deployment(resp) + return resp + + class _DeletePreview(ConfigRestStub): + def __hash__(self): + return hash("DeletePreview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.DeletePreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete preview method over HTTP. + + Args: + request (~.config.DeletePreviewRequest): + The request object. A request to delete a preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/previews/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_preview(request, metadata) + pb_request = config.DeletePreviewRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1076,7 +1404,7 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_deployment(resp) + resp = self._interceptor.post_delete_preview(resp) return resp class _DeleteStatefile(ConfigRestStub): @@ -1358,6 +1686,105 @@ def __call__( resp = self._interceptor.post_export_lock_info(resp) return resp + class _ExportPreviewResult(ConfigRestStub): + def __hash__(self): + return hash("ExportPreviewResult") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ExportPreviewResultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ExportPreviewResultResponse: + r"""Call the export preview result method over HTTP. + + Args: + request (~.config.ExportPreviewResultRequest): + The request object. A request to export preview results. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.ExportPreviewResultResponse: + A response to ``ExportPreviewResult`` call. Contains + preview results. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/previews/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_preview_result( + request, metadata + ) + pb_request = config.ExportPreviewResultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ExportPreviewResultResponse() + pb_resp = config.ExportPreviewResultResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_preview_result(resp) + return resp + class _ExportRevisionStatefile(ConfigRestStub): def __hash__(self): return hash("ExportRevisionStatefile") @@ -1548,6 +1975,97 @@ def __call__( resp = self._interceptor.post_get_deployment(resp) return resp + class _GetPreview(ConfigRestStub): + def __hash__(self): + return hash("GetPreview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.GetPreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Preview: + r"""Call the get preview method over HTTP. + + Args: + request (~.config.GetPreviewRequest): + The request object. A request to get details about a + preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/previews/*}", + }, + ] + request, metadata = self._interceptor.pre_get_preview(request, metadata) + pb_request = config.GetPreviewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Preview() + pb_resp = config.Preview.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_preview(resp) + return resp + class _GetResource(ConfigRestStub): def __hash__(self): return hash("GetResource") @@ -1919,6 +2437,95 @@ def __call__( resp = self._interceptor.post_list_deployments(resp) return resp + class _ListPreviews(ConfigRestStub): + def __hash__(self): + return hash("ListPreviews") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ListPreviewsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ListPreviewsResponse: + r"""Call the list previews method over HTTP. + + Args: + request (~.config.ListPreviewsRequest): + The request object. A request to list all previews for a + given project and location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.ListPreviewsResponse: + A response to a ``ListPreviews`` call. Contains a list + of Previews. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/previews", + }, + ] + request, metadata = self._interceptor.pre_list_previews(request, metadata) + pb_request = config.ListPreviewsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ListPreviewsResponse() + pb_resp = config.ListPreviewsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_previews(resp) + return resp + class _ListResources(ConfigRestStub): def __hash__(self): return hash("ListResources") @@ -2399,6 +3006,14 @@ def create_deployment( # In C++ this would require a dynamic_cast return self._CreateDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def create_preview( + self, + ) -> Callable[[config.CreatePreviewRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreatePreview(self._session, self._host, self._interceptor) # type: ignore + @property def delete_deployment( self, @@ -2407,6 +3022,14 @@ def delete_deployment( # In C++ this would require a dynamic_cast return self._DeleteDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_preview( + self, + ) -> Callable[[config.DeletePreviewRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePreview(self._session, self._host, self._interceptor) # type: ignore + @property def delete_statefile( self, @@ -2431,6 +3054,16 @@ def export_lock_info( # In C++ this would require a dynamic_cast return self._ExportLockInfo(self._session, self._host, self._interceptor) # type: ignore + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], config.ExportPreviewResultResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportPreviewResult(self._session, self._host, self._interceptor) # type: ignore + @property def export_revision_statefile( self, @@ -2447,6 +3080,12 @@ def get_deployment( # In C++ this would require a dynamic_cast return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def get_preview(self) -> Callable[[config.GetPreviewRequest], config.Preview]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetPreview(self._session, self._host, self._interceptor) # type: ignore + @property def get_resource(self) -> Callable[[config.GetResourceRequest], config.Resource]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -2475,6 +3114,14 @@ def list_deployments( # In C++ this would require a dynamic_cast return self._ListDeployments(self._session, self._host, self._interceptor) # type: ignore + @property + def list_previews( + self, + ) -> Callable[[config.ListPreviewsRequest], config.ListPreviewsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPreviews(self._session, self._host, self._interceptor) # type: ignore + @property def list_resources( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py index 5ce94d9c52f7..6c87a43cf63b 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py @@ -16,20 +16,27 @@ from .config import ( ApplyResults, CreateDeploymentRequest, + CreatePreviewRequest, DeleteDeploymentRequest, + DeletePreviewRequest, DeleteStatefileRequest, Deployment, DeploymentOperationMetadata, ExportDeploymentStatefileRequest, ExportLockInfoRequest, + ExportPreviewResultRequest, + ExportPreviewResultResponse, ExportRevisionStatefileRequest, GetDeploymentRequest, + GetPreviewRequest, GetResourceRequest, GetRevisionRequest, GitSource, ImportStatefileRequest, ListDeploymentsRequest, ListDeploymentsResponse, + ListPreviewsRequest, + ListPreviewsResponse, ListResourcesRequest, ListResourcesResponse, ListRevisionsRequest, @@ -37,6 +44,10 @@ LockDeploymentRequest, LockInfo, OperationMetadata, + Preview, + PreviewArtifacts, + 
PreviewOperationMetadata, + PreviewResult, Resource, ResourceCAIInfo, ResourceTerraformInfo, @@ -53,20 +64,27 @@ __all__ = ( "ApplyResults", "CreateDeploymentRequest", + "CreatePreviewRequest", "DeleteDeploymentRequest", + "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", "DeploymentOperationMetadata", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", "GetDeploymentRequest", + "GetPreviewRequest", "GetResourceRequest", "GetRevisionRequest", "GitSource", "ImportStatefileRequest", "ListDeploymentsRequest", "ListDeploymentsResponse", + "ListPreviewsRequest", + "ListPreviewsResponse", "ListResourcesRequest", "ListResourcesResponse", "ListRevisionsRequest", @@ -74,6 +92,10 @@ "LockDeploymentRequest", "LockInfo", "OperationMetadata", + "Preview", + "PreviewArtifacts", + "PreviewOperationMetadata", + "PreviewResult", "Resource", "ResourceCAIInfo", "ResourceTerraformInfo", diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/config.py b/packages/google-cloud-config/google/cloud/config_v1/types/config.py index 91145d025753..589c58143948 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/config.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/config.py @@ -60,6 +60,17 @@ "UnlockDeploymentRequest", "ExportLockInfoRequest", "LockInfo", + "Preview", + "PreviewOperationMetadata", + "PreviewArtifacts", + "CreatePreviewRequest", + "GetPreviewRequest", + "ListPreviewsRequest", + "ListPreviewsResponse", + "DeletePreviewRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", + "PreviewResult", }, ) @@ -889,6 +900,11 @@ class DeletePolicy(proto.Enum): class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -896,6 +912,11 @@ class OperationMetadata(proto.Message): Output only. Metadata about the deployment operation state. + This field is a member of `oneof`_ ``resource_metadata``. + preview_metadata (google.cloud.config_v1.types.PreviewOperationMetadata): + Output only. Metadata about the preview + operation state. + This field is a member of `oneof`_ ``resource_metadata``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -929,6 +950,12 @@ class OperationMetadata(proto.Message): oneof="resource_metadata", message="DeploymentOperationMetadata", ) + preview_metadata: "PreviewOperationMetadata" = proto.Field( + proto.MESSAGE, + number=9, + oneof="resource_metadata", + message="PreviewOperationMetadata", + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, @@ -1799,4 +1826,622 @@ class LockInfo(proto.Message): ) +class Preview(proto.Message): + r"""A preview represents a set of actions Infra Manager would + perform to move the resources towards the desired state as + specified in the configuration. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + terraform_blueprint (google.cloud.config_v1.types.TerraformBlueprint): + The terraform blueprint to preview. + + This field is a member of `oneof`_ ``blueprint``. + name (str): + Identifier. Resource name of the preview. Resource name can + be user provided or server generated ID if unspecified. + Format: + ``projects/{project}/locations/{location}/previews/{preview}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the preview was created. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + preview. 
+ state (google.cloud.config_v1.types.Preview.State): + Output only. Current state of the preview. + deployment (str): + Optional. Optional deployment reference. If + specified, the preview will be performed using + the provided deployment's current state and use + any relevant fields from the deployment unless + explicitly specified in the preview create + request. + preview_mode (google.cloud.config_v1.types.Preview.PreviewMode): + Optional. Current mode of preview. + service_account (str): + Optional. Optional service account. If + omitted, the deployment resource reference must + be provided, and the service account attached to + the deployment will be used. + artifacts_gcs_bucket (str): + Optional. User-defined location of Cloud Build logs, + artifacts, and in Google Cloud Storage. Format: + ``gs://{bucket}/{folder}`` A default bucket will be + bootstrapped if the field is not set or empty Default Bucket + Format: ``gs://--blueprint-config`` + Constraints: + + - The bucket needs to be in the same project as the + deployment + - The path cannot be within the path of ``gcs_source`` If + omitted and deployment resource ref provided has + artifacts_gcs_bucket defined, that artifact bucket is + used. + + This field is a member of `oneof`_ ``_artifacts_gcs_bucket``. + worker_pool (str): + Optional. The user-specified Worker Pool resource in which + the Cloud Build job will execute. Format + projects/{project}/locations/{location}/workerPools/{workerPoolId} + If this field is unspecified, the default Cloud Build worker + pool will be used. If omitted and deployment resource ref + provided has worker_pool defined, that worker pool is used. + + This field is a member of `oneof`_ ``_worker_pool``. + error_code (google.cloud.config_v1.types.Preview.ErrorCode): + Output only. Code describing any errors that + may have occurred. + error_status (google.rpc.status_pb2.Status): + Output only. Additional information regarding + the current state. + build (str): + Output only. 
Cloud Build instance UUID + associated with this preview. + tf_errors (MutableSequence[google.cloud.config_v1.types.TerraformError]): + Output only. Summary of errors encountered + during Terraform preview. It has a size limit of + 10, i.e. only top 10 errors will be summarized + here. + error_logs (str): + Output only. Link to tf-error.ndjson file, which contains + the full list of the errors encountered during a Terraform + preview. Format: ``gs://{bucket}/{object}``. + preview_artifacts (google.cloud.config_v1.types.PreviewArtifacts): + Output only. Artifacts from preview. + logs (str): + Output only. Location of preview logs in + ``gs://{bucket}/{object}`` format. + """ + + class State(proto.Enum): + r"""Possible states of a preview. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is unknown. + CREATING (1): + The preview is being created. + SUCCEEDED (2): + The preview has succeeded. + APPLYING (3): + The preview is being applied. + STALE (4): + The preview is stale. A preview can become + stale if a revision has been applied after this + preview was created. + DELETING (5): + The preview is being deleted. + FAILED (6): + The preview has encountered an unexpected + error. + DELETED (7): + The preview has been deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + SUCCEEDED = 2 + APPLYING = 3 + STALE = 4 + DELETING = 5 + FAILED = 6 + DELETED = 7 + + class PreviewMode(proto.Enum): + r"""Preview mode provides options for customizing preview + operations. + + Values: + PREVIEW_MODE_UNSPECIFIED (0): + Unspecified policy, default mode will be + used. + DEFAULT (1): + DEFAULT mode generates an execution plan for + reconciling current resource state into expected + resource state. + DELETE (2): + DELETE mode generates as execution plan for + destroying current resources. + """ + PREVIEW_MODE_UNSPECIFIED = 0 + DEFAULT = 1 + DELETE = 2 + + class ErrorCode(proto.Enum): + r"""Possible errors that can occur with previews. 
+ + Values: + ERROR_CODE_UNSPECIFIED (0): + No error code was specified. + CLOUD_BUILD_PERMISSION_DENIED (1): + Cloud Build failed due to a permissions + issue. + BUCKET_CREATION_PERMISSION_DENIED (2): + Cloud Storage bucket failed to create due to + a permissions issue. + BUCKET_CREATION_FAILED (3): + Cloud Storage bucket failed for a + non-permissions-related issue. + DEPLOYMENT_LOCK_ACQUIRE_FAILED (4): + Acquiring lock on provided deployment + reference failed. + PREVIEW_BUILD_API_FAILED (5): + Preview encountered an error when trying to + access Cloud Build API. + PREVIEW_BUILD_RUN_FAILED (6): + Preview created a build but build failed and + logs were generated. + """ + ERROR_CODE_UNSPECIFIED = 0 + CLOUD_BUILD_PERMISSION_DENIED = 1 + BUCKET_CREATION_PERMISSION_DENIED = 2 + BUCKET_CREATION_FAILED = 3 + DEPLOYMENT_LOCK_ACQUIRE_FAILED = 4 + PREVIEW_BUILD_API_FAILED = 5 + PREVIEW_BUILD_RUN_FAILED = 6 + + terraform_blueprint: "TerraformBlueprint" = proto.Field( + proto.MESSAGE, + number=6, + oneof="blueprint", + message="TerraformBlueprint", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + deployment: str = proto.Field( + proto.STRING, + number=5, + ) + preview_mode: PreviewMode = proto.Field( + proto.ENUM, + number=15, + enum=PreviewMode, + ) + service_account: str = proto.Field( + proto.STRING, + number=7, + ) + artifacts_gcs_bucket: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + error_code: ErrorCode = proto.Field( + proto.ENUM, + number=10, + enum=ErrorCode, + ) + error_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=11, + 
message=status_pb2.Status, + ) + build: str = proto.Field( + proto.STRING, + number=12, + ) + tf_errors: MutableSequence["TerraformError"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="TerraformError", + ) + error_logs: str = proto.Field( + proto.STRING, + number=14, + ) + preview_artifacts: "PreviewArtifacts" = proto.Field( + proto.MESSAGE, + number=16, + message="PreviewArtifacts", + ) + logs: str = proto.Field( + proto.STRING, + number=17, + ) + + +class PreviewOperationMetadata(proto.Message): + r"""Ephemeral metadata content describing the state of a preview + operation. + + Attributes: + step (google.cloud.config_v1.types.PreviewOperationMetadata.PreviewStep): + The current step the preview operation is + running. + preview_artifacts (google.cloud.config_v1.types.PreviewArtifacts): + Artifacts from preview. + logs (str): + Output only. Location of preview logs in + ``gs://{bucket}/{object}`` format. + build (str): + Output only. Cloud Build instance UUID + associated with this preview. + """ + + class PreviewStep(proto.Enum): + r"""The possible steps a preview may be running. + + Values: + PREVIEW_STEP_UNSPECIFIED (0): + Unspecified preview step. + PREPARING_STORAGE_BUCKET (1): + Infra Manager is creating a Google Cloud + Storage bucket to store artifacts and metadata + about the preview. + DOWNLOADING_BLUEPRINT (2): + Downloading the blueprint onto the Google + Cloud Storage bucket. + RUNNING_TF_INIT (3): + Initializing Terraform using ``terraform init``. + RUNNING_TF_PLAN (4): + Running ``terraform plan``. + FETCHING_DEPLOYMENT (5): + Fetching a deployment. + LOCKING_DEPLOYMENT (6): + Locking a deployment. + UNLOCKING_DEPLOYMENT (7): + Unlocking a deployment. + SUCCEEDED (8): + Operation was successful. + FAILED (9): + Operation failed. 
+ """ + PREVIEW_STEP_UNSPECIFIED = 0 + PREPARING_STORAGE_BUCKET = 1 + DOWNLOADING_BLUEPRINT = 2 + RUNNING_TF_INIT = 3 + RUNNING_TF_PLAN = 4 + FETCHING_DEPLOYMENT = 5 + LOCKING_DEPLOYMENT = 6 + UNLOCKING_DEPLOYMENT = 7 + SUCCEEDED = 8 + FAILED = 9 + + step: PreviewStep = proto.Field( + proto.ENUM, + number=1, + enum=PreviewStep, + ) + preview_artifacts: "PreviewArtifacts" = proto.Field( + proto.MESSAGE, + number=2, + message="PreviewArtifacts", + ) + logs: str = proto.Field( + proto.STRING, + number=3, + ) + build: str = proto.Field( + proto.STRING, + number=4, + ) + + +class PreviewArtifacts(proto.Message): + r"""Artifacts created by preview. + + Attributes: + content (str): + Output only. Location of a blueprint copy and other content + in Google Cloud Storage. Format: ``gs://{bucket}/{object}`` + artifacts (str): + Output only. Location of artifacts in Google Cloud Storage. + Format: ``gs://{bucket}/{object}`` + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + artifacts: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreatePreviewRequest(proto.Message): + r"""A request to create a preview. + + Attributes: + parent (str): + Required. The parent in whose context the Preview is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + preview_id (str): + Optional. The preview ID. + preview (google.cloud.config_v1.types.Preview): + Required. [Preview][google.cloud.config.v1.Preview] resource + to be created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + preview_id: str = proto.Field( + proto.STRING, + number=2, + ) + preview: "Preview" = proto.Field( + proto.MESSAGE, + number=3, + message="Preview", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GetPreviewRequest(proto.Message): + r"""A request to get details about a preview. + + Attributes: + name (str): + Required. The name of the preview. Format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPreviewsRequest(proto.Message): + r"""A request to list all previews for a given project and + location. + + Attributes: + parent (str): + Required. The parent in whose context the Previews are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + page_size (int): + Optional. When requesting a page of resources, 'page_size' + specifies number of resources to return. If unspecified or + set to 0, all resources will be returned. + page_token (str): + Optional. Token returned by previous call to + 'ListDeployments' which specifies the position + in the list from where to continue listing the + resources. + filter (str): + Optional. Lists the Deployments that match the filter + expression. A filter expression filters the resources listed + in the response. 
The expression must be of the form '{field} + {operator} {value}' where operators: '<', '>', '<=', '>=', + '!=', '=', ':' are supported (colon ':' represents a HAS + operator which is roughly synonymous with equality). {field} + can refer to a proto or JSON field, or a synthetic field. + Field names can be camelCase or snake_case. + + Examples: + + - Filter by name: name = + "projects/foo/locations/us-central1/deployments/bar + + - Filter by labels: + + - Resources that have a key called 'foo' labels.foo:\* + - Resources that have a key called 'foo' whose value is + 'bar' labels.foo = bar + + - Filter by state: + + - Deployments in CREATING state. state=CREATING + order_by (str): + Optional. Field to use to sort the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPreviewsResponse(proto.Message): + r"""A response to a ``ListPreviews`` call. Contains a list of Previews. + + Attributes: + previews (MutableSequence[google.cloud.config_v1.types.Preview]): + List of [Previews][]s. + next_page_token (str): + Token to be supplied to the next ListPreviews request via + ``page_token`` to obtain the next set of results. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + previews: MutableSequence["Preview"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Preview", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeletePreviewRequest(proto.Message): + r"""A request to delete a preview. + + Attributes: + name (str): + Required. 
The name of the Preview in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ExportPreviewResultRequest(proto.Message): + r"""A request to export preview results. + + Attributes: + parent (str): + Required. The preview whose results should be exported. The + preview value is in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportPreviewResultResponse(proto.Message): + r"""A response to ``ExportPreviewResult`` call. Contains preview + results. + + Attributes: + result (google.cloud.config_v1.types.PreviewResult): + Output only. Signed URLs for accessing the + plan files. + """ + + result: "PreviewResult" = proto.Field( + proto.MESSAGE, + number=1, + message="PreviewResult", + ) + + +class PreviewResult(proto.Message): + r"""Contains a signed Cloud Storage URLs. + + Attributes: + binary_signed_uri (str): + Output only. Plan binary signed URL + json_signed_uri (str): + Output only. 
Plan JSON signed URL + """ + + binary_signed_uri: str = proto.Field( + proto.STRING, + number=1, + ) + json_signed_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-config/noxfile.py b/packages/google-cloud-config/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-config/noxfile.py +++ b/packages/google-cloud-config/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_async.py new file mode 100644 index 000000000000..16fcc030958b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreatePreview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_create_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_CreatePreview_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_sync.py new file mode 100644 index 000000000000..db581ef7d92b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreatePreview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_create_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_CreatePreview_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_async.py new file mode 100644 index 000000000000..e0582077006b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeletePreview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_delete_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_DeletePreview_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_sync.py new file mode 100644 index 000000000000..4b79b21e5600 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeletePreview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_delete_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_DeletePreview_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_async.py new file mode 100644 index 000000000000..4ed8c1b5b82b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportPreviewResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportPreviewResult_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_preview_result(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportPreviewResult_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_sync.py new file mode 100644 index 000000000000..e5b314c834fa --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportPreviewResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportPreviewResult_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_preview_result(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportPreviewResult_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_async.py new file mode 100644 index 000000000000..e27ee6bbb74c --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetPreview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = await client.get_preview(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetPreview_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_sync.py new file mode 100644 index 000000000000..746f326e3a5b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetPreview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = client.get_preview(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetPreview_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_async.py new file mode 100644 index 000000000000..76176272ca92 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPreviews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListPreviews_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_previews(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListPreviews_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_sync.py new file mode 100644 index 000000000000..b781dfa7f067 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPreviews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListPreviews_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_previews(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListPreviews_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index 1dc05379a40f..7885778d0ae8 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-config", - "version": "0.1.2" + "version": "0.1.3" }, "snippets": [ { @@ -188,6 +188,175 @@ ], "title": 
"config_v1_generated_config_create_deployment_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.create_preview", + "method": { + "fullName": "google.cloud.config.v1.Config.CreatePreview", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreatePreview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreatePreviewRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "preview", + "type": "google.cloud.config_v1.types.Preview" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_preview" + }, + "description": "Sample for CreatePreview", + "file": "config_v1_generated_config_create_preview_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreatePreview_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_preview_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.create_preview", + "method": { + "fullName": 
"google.cloud.config.v1.Config.CreatePreview", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreatePreview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreatePreviewRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "preview", + "type": "google.cloud.config_v1.types.Preview" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_preview" + }, + "description": "Sample for CreatePreview", + "file": "config_v1_generated_config_create_preview_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreatePreview_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_preview_sync.py" + }, { "canonical": true, "clientMethod": { @@ -357,19 +526,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_preview", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "fullName": "google.cloud.config.v1.Config.DeletePreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteStatefile" + "shortName": "DeletePreview" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + "type": "google.cloud.config_v1.types.DeletePreviewRequest" }, { "name": "name", @@ -388,21 +557,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_statefile" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_preview" }, - "description": "Sample for DeleteStatefile", - "file": "config_v1_generated_config_delete_statefile_async.py", + "description": "Sample for DeletePreview", + "file": "config_v1_generated_config_delete_preview_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteStatefile_async", + "regionTag": "config_v1_generated_Config_DeletePreview_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -412,20 +582,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_statefile_async.py" + "title": "config_v1_generated_config_delete_preview_async.py" }, { "canonical": true, @@ -434,19 +606,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.delete_preview", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "fullName": "google.cloud.config.v1.Config.DeletePreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteStatefile" + "shortName": "DeletePreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + "type": 
"google.cloud.config_v1.types.DeletePreviewRequest" }, { "name": "name", @@ -465,21 +637,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_statefile" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_preview" }, - "description": "Sample for DeleteStatefile", - "file": "config_v1_generated_config_delete_statefile_sync.py", + "description": "Sample for DeletePreview", + "file": "config_v1_generated_config_delete_preview_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", + "regionTag": "config_v1_generated_Config_DeletePreview_sync", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -489,20 +662,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_statefile_sync.py" + "title": "config_v1_generated_config_delete_preview_sync.py" }, { "canonical": true, @@ -512,19 +687,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportDeploymentStatefile" + "shortName": "DeleteStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { 
+ "name": "name", + "type": "str" }, { "name": "retry", @@ -539,22 +718,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_deployment_statefile" + "shortName": "delete_statefile" }, - "description": "Sample for ExportDeploymentStatefile", - "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "regionTag": "config_v1_generated_Config_DeleteStatefile_async", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -564,22 +742,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 51, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_deployment_statefile_async.py" + "title": "config_v1_generated_config_delete_statefile_async.py" }, { "canonical": true, @@ -588,19 +764,23 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportDeploymentStatefile" + "shortName": "DeleteStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + "type": 
"google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -615,22 +795,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_deployment_statefile" + "shortName": "delete_statefile" }, - "description": "Sample for ExportDeploymentStatefile", - "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_sync", + "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -640,18 +819,169 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_deployment_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"config_v1_generated_Config_ExportDeploymentStatefile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -667,21 +997,331 @@ }, "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_lock_info", "method": { - "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_preview_result", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": 
"ExportPreviewResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", + "shortName": "export_preview_result" + }, + "description": "Sample for ExportPreviewResult", + "file": "config_v1_generated_config_export_preview_result_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportPreviewResult_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_preview_result_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_preview_result", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportPreviewResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", + 
"shortName": "export_preview_result" + }, + "description": "Sample for ExportPreviewResult", + "file": "config_v1_generated_config_export_preview_result_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportPreviewResult_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_preview_result_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportLockInfo" + "shortName": "ExportRevisionStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportLockInfoRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" }, { "name": "retry", @@ -696,14 +1336,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.LockInfo", - "shortName": "export_lock_info" + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" }, - "description": "Sample for ExportLockInfo", - "file": "config_v1_generated_config_export_lock_info_async.py", + "description": "Sample for ExportRevisionStatefile", + "file": 
"config_v1_generated_config_export_revision_statefile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", "segments": [ { "end": 51, @@ -736,7 +1376,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_lock_info_async.py" + "title": "config_v1_generated_config_export_revision_statefile_async.py" }, { "canonical": true, @@ -745,23 +1385,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "fullName": "google.cloud.config_v1.ConfigClient.export_revision_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportLockInfo" + "shortName": "ExportRevisionStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportLockInfoRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" }, { "name": "retry", @@ -776,14 +1412,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.LockInfo", - "shortName": "export_lock_info" + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" }, - "description": "Sample for ExportLockInfo", - "file": "config_v1_generated_config_export_lock_info_sync.py", + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_sync", "segments": [ { 
"end": 51, @@ -816,7 +1452,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_lock_info_sync.py" + "title": "config_v1_generated_config_export_revision_statefile_sync.py" }, { "canonical": true, @@ -826,19 +1462,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "fullName": "google.cloud.config.v1.Config.GetDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportRevisionStatefile" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -853,14 +1493,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_revision_statefile" + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for ExportRevisionStatefile", - "file": "config_v1_generated_config_export_revision_statefile_async.py", + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", + "regionTag": "config_v1_generated_Config_GetDeployment_async", "segments": [ { "end": 51, @@ -893,7 +1533,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_revision_statefile_async.py" + "title": "config_v1_generated_config_get_deployment_async.py" }, { "canonical": true, @@ -902,19 +1542,23 @@ 
"fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_revision_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "fullName": "google.cloud.config.v1.Config.GetDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportRevisionStatefile" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -929,14 +1573,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_revision_statefile" + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for ExportRevisionStatefile", - "file": "config_v1_generated_config_export_revision_statefile_sync.py", + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_sync", + "regionTag": "config_v1_generated_Config_GetDeployment_sync", "segments": [ { "end": 51, @@ -969,7 +1613,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_revision_statefile_sync.py" + "title": "config_v1_generated_config_get_deployment_sync.py" }, { "canonical": true, @@ -979,19 +1623,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_preview", "method": { - "fullName": 
"google.cloud.config.v1.Config.GetDeployment", + "fullName": "google.cloud.config.v1.Config.GetPreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetDeployment" + "shortName": "GetPreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetDeploymentRequest" + "type": "google.cloud.config_v1.types.GetPreviewRequest" }, { "name": "name", @@ -1010,14 +1654,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Deployment", - "shortName": "get_deployment" + "resultType": "google.cloud.config_v1.types.Preview", + "shortName": "get_preview" }, - "description": "Sample for GetDeployment", - "file": "config_v1_generated_config_get_deployment_async.py", + "description": "Sample for GetPreview", + "file": "config_v1_generated_config_get_preview_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetDeployment_async", + "regionTag": "config_v1_generated_Config_GetPreview_async", "segments": [ { "end": 51, @@ -1050,7 +1694,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_deployment_async.py" + "title": "config_v1_generated_config_get_preview_async.py" }, { "canonical": true, @@ -1059,19 +1703,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", + "fullName": "google.cloud.config_v1.ConfigClient.get_preview", "method": { - "fullName": "google.cloud.config.v1.Config.GetDeployment", + "fullName": "google.cloud.config.v1.Config.GetPreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetDeployment" + "shortName": "GetPreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetDeploymentRequest" + "type": "google.cloud.config_v1.types.GetPreviewRequest" }, { "name": "name", @@ -1090,14 
+1734,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Deployment", - "shortName": "get_deployment" + "resultType": "google.cloud.config_v1.types.Preview", + "shortName": "get_preview" }, - "description": "Sample for GetDeployment", - "file": "config_v1_generated_config_get_deployment_sync.py", + "description": "Sample for GetPreview", + "file": "config_v1_generated_config_get_preview_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetDeployment_sync", + "regionTag": "config_v1_generated_Config_GetPreview_sync", "segments": [ { "end": 51, @@ -1130,7 +1774,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_deployment_sync.py" + "title": "config_v1_generated_config_get_preview_sync.py" }, { "canonical": true, @@ -1784,6 +2428,167 @@ ], "title": "config_v1_generated_config_list_deployments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_previews", + "method": { + "fullName": "google.cloud.config.v1.Config.ListPreviews", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListPreviews" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListPreviewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListPreviewsAsyncPager", + "shortName": "list_previews" + }, + "description": "Sample for ListPreviews", + "file": "config_v1_generated_config_list_previews_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListPreviews_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_previews_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.list_previews", + "method": { + "fullName": "google.cloud.config.v1.Config.ListPreviews", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListPreviews" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListPreviewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListPreviewsPager", + "shortName": "list_previews" + }, + "description": "Sample for ListPreviews", + "file": "config_v1_generated_config_list_previews_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListPreviews_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_previews_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py b/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py index 8d7b0d1d739f..c9978014e855 100644 --- a/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py +++ b/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py @@ -40,16 +40,21 @@ class configCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_deployment': ('parent', 'deployment_id', 'deployment', 'request_id', ), + 'create_preview': ('parent', 'preview', 'preview_id', 'request_id', ), 'delete_deployment': ('name', 'request_id', 'force', 'delete_policy', ), + 'delete_preview': ('name', 'request_id', ), 'delete_statefile': ('name', 'lock_id', ), 'export_deployment_statefile': ('parent', 'draft', ), 'export_lock_info': ('name', ), + 'export_preview_result': ('parent', ), 'export_revision_statefile': ('parent', ), 'get_deployment': ('name', ), + 'get_preview': ('name', ), 'get_resource': ('name', ), 'get_revision': ('name', ), 'import_statefile': ('parent', 'lock_id', 'skip_draft', ), 'list_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_previews': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_resources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_revisions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'lock_deployment': ('name', ), diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index 635b310aed98..e4be71f0f933 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ 
b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -5024,6 +5024,1336 @@ async def test_export_lock_info_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + config.CreatePreviewRequest, + dict, + ], +) +def test_create_preview(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreatePreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_preview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + client.create_preview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreatePreviewRequest() + + +@pytest.mark.asyncio +async def test_create_preview_async( + transport: str = "grpc_asyncio", request_type=config.CreatePreviewRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreatePreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_preview_async_from_dict(): + await test_create_preview_async(request_type=dict) + + +def test_create_preview_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreatePreviewRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_preview_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreatePreviewRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_preview_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_preview( + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].preview + mock_val = config.Preview( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + + +def test_create_preview_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_preview( + config.CreatePreviewRequest(), + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + +@pytest.mark.asyncio +async def test_create_preview_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_preview( + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].preview + mock_val = config.Preview( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_preview_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_preview( + config.CreatePreviewRequest(), + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetPreviewRequest, + dict, + ], +) +def test_get_preview(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + ) + response = client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetPreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" + + +def test_get_preview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + client.get_preview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetPreviewRequest() + + +@pytest.mark.asyncio +async def test_get_preview_async( + transport: str = "grpc_asyncio", request_type=config.GetPreviewRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + ) + ) + response = await client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetPreviewRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" + + +@pytest.mark.asyncio +async def test_get_preview_async_from_dict(): + await test_get_preview_async(request_type=dict) + + +def test_get_preview_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetPreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + call.return_value = config.Preview() + client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_preview_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.GetPreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Preview()) + await client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_preview_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Preview() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_preview_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_preview( + config.GetPreviewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_preview_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Preview() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Preview()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_preview_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_preview( + config.GetPreviewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListPreviewsRequest, + dict, + ], +) +def test_list_previews(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListPreviewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPreviewsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_previews_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + client.list_previews() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListPreviewsRequest() + + +@pytest.mark.asyncio +async def test_list_previews_async( + transport: str = "grpc_asyncio", request_type=config.ListPreviewsRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListPreviewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPreviewsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_previews_async_from_dict(): + await test_list_previews_async(request_type=dict) + + +def test_list_previews_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListPreviewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + call.return_value = config.ListPreviewsResponse() + client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_previews_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListPreviewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse() + ) + await client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_previews_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListPreviewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_previews( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_previews_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_previews_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListPreviewsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_previews( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_previews_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", + ) + + +def test_list_previews_pager(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_previews(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Preview) for i in results) + + +def test_list_previews_pages(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + pages = list(client.list_previews(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_previews_async_pager(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_previews), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_previews( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, config.Preview) for i in responses) + + +@pytest.mark.asyncio +async def test_list_previews_async_pages(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_previews), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_previews(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeletePreviewRequest, + dict, + ], +) +def test_delete_preview(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeletePreviewRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_preview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + client.delete_preview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeletePreviewRequest() + + +@pytest.mark.asyncio +async def test_delete_preview_async( + transport: str = "grpc_asyncio", request_type=config.DeletePreviewRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeletePreviewRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_preview_async_from_dict(): + await test_delete_preview_async(request_type=dict) + + +def test_delete_preview_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeletePreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_preview_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeletePreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_preview_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_preview_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_preview( + config.DeletePreviewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_preview_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_preview_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_preview( + config.DeletePreviewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportPreviewResultRequest, + dict, + ], +) +def test_export_preview_result(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ExportPreviewResultResponse() + response = client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportPreviewResultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.ExportPreviewResultResponse) + + +def test_export_preview_result_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + client.export_preview_result() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportPreviewResultRequest() + + +@pytest.mark.asyncio +async def test_export_preview_result_async( + transport: str = "grpc_asyncio", request_type=config.ExportPreviewResultRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ExportPreviewResultResponse() + ) + response = await client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportPreviewResultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.ExportPreviewResultResponse) + + +@pytest.mark.asyncio +async def test_export_preview_result_async_from_dict(): + await test_export_preview_result_async(request_type=dict) + + +def test_export_preview_result_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.ExportPreviewResultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + call.return_value = config.ExportPreviewResultResponse() + client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_preview_result_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportPreviewResultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ExportPreviewResultResponse() + ) + await client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -5031,20 +6361,1756 @@ async def test_export_lock_info_flattened_error_async(): dict, ], ) -def test_list_deployments_rest(request_type): +def test_list_deployments_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_deployments_rest_required_fields( + request_type=config.ListDeploymentsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.ListDeploymentsResponse.to_json( + config.ListDeploymentsResponse() + ) + + request = config.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListDeploymentsResponse() + + client.list_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=config.ListDeploymentsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) + + +def test_list_deployments_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Deployment) for i in results) + + pages = list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + 
config.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Deployment( + name="name_value", + state=config.Deployment.State.CREATING, + latest_revision="latest_revision_value", + state_detail="state_detail_value", + error_code=config.Deployment.ErrorCode.REVISION_FAILED, + delete_build="delete_build_value", + delete_logs="delete_logs_value", + error_logs="error_logs_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + lock_state=config.Deployment.LockState.LOCKED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Deployment) + assert response.name == "name_value" + assert response.state == config.Deployment.State.CREATING + assert response.latest_revision == "latest_revision_value" + assert response.state_detail == "state_detail_value" + assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED + assert response.delete_build == "delete_build_value" + assert response.delete_logs == "delete_logs_value" + assert response.error_logs == "error_logs_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + assert response.lock_state == config.Deployment.LockState.LOCKED + + +def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Deployment.to_json(config.Deployment()) + + request = config.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Deployment() + + client.get_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=config.GetDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) + + +def test_get_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deployment( + config.GetDeploymentRequest(), + name="name_value", + ) + + +def test_get_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.CreateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_deployment_rest_required_fields( + request_type=config.CreateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "deploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == request_init["deployment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentId"] = "deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "deployment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == "deployment_id_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_deployment(request) + + expected_params = [ + ( + "deploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deploymentId", + "requestId", + ) + ) + & set( + ( + "parent", + "deploymentId", + "deployment", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) 
+ with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=config.CreateDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) + + +def test_create_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + +def test_create_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateDeploymentRequest, + dict, + ], +) +def test_update_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "projects/sample1/locations/sample2/deployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": 
"delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.UpdateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del 
request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_deployment_rest_required_fields( + request_type=config.UpdateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("deployment",)) + ) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_update_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=config.UpdateDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) + + +def test_update_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_deployment_rest_required_fields( + request_type=config.DeleteDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "delete_policy", + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deletePolicy", + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=config.DeleteDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) + + +def test_delete_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deployment( + config.DeleteDeploymentRequest(), + name="name_value", + ) + + +def test_delete_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListRevisionsRequest, + dict, + ], +) +def test_list_revisions_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse( + return_value = config.ListRevisionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -5053,22 +8119,20 @@ def test_list_deployments_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_deployments(request) + response = client.list_revisions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeploymentsPager) + assert isinstance(response, pagers.ListRevisionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_deployments_rest_required_fields( - request_type=config.ListDeploymentsRequest, -): +def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -5087,7 +8151,7 @@ def test_list_deployments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5096,7 +8160,7 @@ def test_list_deployments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -5119,7 +8183,7 @@ def test_list_deployments_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() + return_value = config.ListRevisionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5140,25 +8204,25 @@ def test_list_deployments_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_deployments(request) + response = client.list_revisions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_deployments_rest_unset_required_fields(): +def test_list_revisions_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_deployments._get_unset_required_fields({}) + unset_fields = transport.list_revisions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -5173,7 +8237,7 @@ def test_list_deployments_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deployments_rest_interceptors(null_interceptor): +def test_list_revisions_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -5184,13 +8248,13 @@ def test_list_deployments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_deployments" + transports.ConfigRestInterceptor, "post_list_revisions" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_deployments" + 
transports.ConfigRestInterceptor, "pre_list_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) + pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5201,19 +8265,19 @@ def test_list_deployments_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.ListDeploymentsResponse.to_json( - config.ListDeploymentsResponse() + req.return_value._content = config.ListRevisionsResponse.to_json( + config.ListRevisionsResponse() ) - request = config.ListDeploymentsRequest() + request = config.ListRevisionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListDeploymentsResponse() + post.return_value = config.ListRevisionsResponse() - client.list_deployments( + client.list_revisions( request, metadata=[ ("key", "val"), @@ -5225,8 +8289,8 @@ def test_list_deployments_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_deployments_rest_bad_request( - transport: str = "rest", request_type=config.ListDeploymentsRequest +def test_list_revisions_rest_bad_request( + transport: str = "rest", request_type=config.ListRevisionsRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5234,7 +8298,7 @@ def test_list_deployments_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -5246,10 +8310,10 @@ def test_list_deployments_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_deployments(request) + client.list_revisions(request) -def test_list_deployments_rest_flattened(): +def test_list_revisions_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5258,10 +8322,12 @@ def test_list_deployments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() + return_value = config.ListRevisionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -5273,25 +8339,25 @@ def test_list_deployments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_deployments(**mock_args) + client.list_revisions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" + "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" % client.transport._host, args[1], ) -def test_list_deployments_rest_flattened_error(transport: str = "rest"): +def test_list_revisions_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5300,13 +8366,13 @@ def test_list_deployments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_deployments( - config.ListDeploymentsRequest(), + client.list_revisions( + config.ListRevisionsRequest(), parent="parent_value", ) -def test_list_deployments_rest_pager(transport: str = "rest"): +def test_list_revisions_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5318,28 +8384,28 @@ def test_list_deployments_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), - config.Deployment(), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), ], next_page_token="abc", ), - config.ListDeploymentsResponse( - deployments=[], + config.ListRevisionsResponse( + revisions=[], next_page_token="def", ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), ], next_page_token="ghi", ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), + config.ListRevisionsResponse( + revisions=[ + 
config.Revision(), + config.Revision(), ], ), ) @@ -5347,22 +8413,24 @@ def test_list_deployments_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } - pager = client.list_deployments(request=sample_request) + pager = client.list_revisions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, config.Deployment) for i in results) + assert all(isinstance(i, config.Revision) for i in results) - pages = list(client.list_deployments(request=sample_request).pages) + pages = list(client.list_revisions(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -5370,68 +8438,68 @@ def test_list_deployments_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - config.GetDeploymentRequest, + config.GetRevisionRequest, dict, ], ) -def test_get_deployment_rest(request_type): +def test_get_revision_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and 
fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Deployment( + return_value = config.Revision( name="name_value", - state=config.Deployment.State.CREATING, - latest_revision="latest_revision_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, state_detail="state_detail_value", - error_code=config.Deployment.ErrorCode.REVISION_FAILED, - delete_build="delete_build_value", - delete_logs="delete_logs_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", error_logs="error_logs_value", - artifacts_gcs_bucket="artifacts_gcs_bucket_value", service_account="service_account_value", import_existing_resources=True, worker_pool="worker_pool_value", - lock_state=config.Deployment.LockState.LOCKED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_deployment(request) + response = client.get_revision(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Deployment) + assert isinstance(response, config.Revision) assert response.name == "name_value" - assert response.state == config.Deployment.State.CREATING - assert response.latest_revision == "latest_revision_value" + assert response.action == config.Revision.Action.CREATE + assert response.state == config.Revision.State.APPLYING assert response.state_detail == "state_detail_value" - assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED - assert response.delete_build == "delete_build_value" - assert response.delete_logs == "delete_logs_value" + assert ( + response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + ) + assert response.build == "build_value" + assert response.logs == "logs_value" assert response.error_logs == "error_logs_value" - assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" assert response.service_account == "service_account_value" assert response.import_existing_resources is True assert response.worker_pool == "worker_pool_value" - assert response.lock_state == config.Deployment.LockState.LOCKED -def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): +def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -5450,7 +8518,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5459,7 +8527,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).get_deployment._get_unset_required_fields(jsonified_request) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5473,7 +8541,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Deployment() + return_value = config.Revision() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5494,30 +8562,30 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_deployment(request) + response = client.get_revision(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_deployment_rest_unset_required_fields(): +def test_get_revision_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_deployment._get_unset_required_fields({}) + unset_fields = transport.get_revision._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deployment_rest_interceptors(null_interceptor): +def test_get_revision_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -5528,13 +8596,13 @@ def test_get_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_deployment" + transports.ConfigRestInterceptor, "post_get_revision" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_deployment" + transports.ConfigRestInterceptor, "pre_get_revision" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) + pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5545,17 +8613,17 @@ def test_get_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Deployment.to_json(config.Deployment()) + req.return_value._content = config.Revision.to_json(config.Revision()) - request = config.GetDeploymentRequest() + request = config.GetRevisionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Deployment() + post.return_value = config.Revision() - client.get_deployment( + client.get_revision( request, metadata=[ ("key", "val"), @@ -5567,8 +8635,8 @@ def test_get_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_deployment_rest_bad_request( - transport: str = "rest", request_type=config.GetDeploymentRequest +def test_get_revision_rest_bad_request( + transport: str = "rest", request_type=config.GetRevisionRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5576,7 +8644,9 @@ def test_get_deployment_rest_bad_request( ) # send a 
request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5588,10 +8658,10 @@ def test_get_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_deployment(request) + client.get_revision(request) -def test_get_deployment_rest_flattened(): +def test_get_revision_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5600,11 +8670,11 @@ def test_get_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Deployment() + return_value = config.Revision() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } # get truthy value for each flattened field @@ -5617,25 +8687,25 @@ def test_get_deployment_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_deployment(**mock_args) + client.get_revision(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" % client.transport._host, args[1], ) -def test_get_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_revision_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5644,13 +8714,13 @@ def test_get_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_deployment( - config.GetDeploymentRequest(), + client.get_revision( + config.GetRevisionRequest(), name="name_value", ) -def test_get_deployment_rest_error(): +def test_get_revision_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5659,162 +8729,54 @@ def test_get_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.CreateDeploymentRequest, + config.GetResourceRequest, dict, ], ) -def test_create_deployment_rest(request_type): +def test_get_resource_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["deployment"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "latest_revision": "latest_revision_value", - "state_detail": "state_detail_value", - "error_code": 1, - "delete_results": { - "content": "content_value", - 
"artifacts": "artifacts_value", - "outputs": {}, - }, - "delete_build": "delete_build_value", - "delete_logs": "delete_logs_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - } - ], - "error_logs": "error_logs_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "service_account": "service_account_value", - "import_existing_resources": True, - "worker_pool": "worker_pool_value", - "lock_state": 1, + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = config.CreateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del 
request_init["deployment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_deployment(request) + response = client.get_resource(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, config.Resource) + assert response.name == "name_value" + assert response.intent == config.Resource.Intent.CREATE + assert response.state == config.Resource.State.PLANNED -def test_create_deployment_rest_required_fields( - request_type=config.CreateDeploymentRequest, -): +def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" - request_init["deployment_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5826,37 +8788,24 @@ def test_create_deployment_rest_required_fields( ) # verify fields with default values are dropped - assert "deploymentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).create_deployment._get_unset_required_fields(jsonified_request) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == request_init["deployment_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["deploymentId"] = "deployment_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "deployment_id", - "request_id", - ) - ) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == "deployment_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5865,7 +8814,7 @@ def test_create_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5877,57 +8826,39 @@ def test_create_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_deployment(request) + response = client.get_resource(request) - expected_params = [ - ( - "deploymentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_deployment_rest_unset_required_fields(): +def test_get_resource_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deploymentId", - "requestId", - ) - ) - & set( - ( - "parent", - "deploymentId", - "deployment", - ) - ) - ) + unset_fields = transport.get_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deployment_rest_interceptors(null_interceptor): +def test_get_resource_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -5938,15 
+8869,13 @@ def test_create_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_create_deployment" + transports.ConfigRestInterceptor, "post_get_resource" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_create_deployment" + transports.ConfigRestInterceptor, "pre_get_resource" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) + pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5957,19 +8886,17 @@ def test_create_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = config.Resource.to_json(config.Resource()) - request = config.CreateDeploymentRequest() + request = config.GetResourceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.Resource() - client.create_deployment( + client.get_resource( request, metadata=[ ("key", "val"), @@ -5981,8 +8908,8 @@ def test_create_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_deployment_rest_bad_request( - transport: str = "rest", request_type=config.CreateDeploymentRequest +def test_get_resource_rest_bad_request( + transport: str = "rest", request_type=config.GetResourceRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5990,7 +8917,9 @@ def test_create_deployment_rest_bad_request( ) # send a request 
that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6002,10 +8931,10 @@ def test_create_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_deployment(request) + client.get_resource(request) -def test_create_deployment_rest_flattened(): +def test_get_resource_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6014,44 +8943,42 @@ def test_create_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_deployment(**mock_args) + client.get_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" % client.transport._host, args[1], ) -def test_create_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_resource_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6060,19 +8987,13 @@ def test_create_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_deployment( - config.CreateDeploymentRequest(), - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", + client.get_resource( + config.GetResourceRequest(), + name="name_value", ) -def test_create_deployment_rest_error(): +def test_get_resource_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6081,11 +9002,11 @@ def test_create_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.UpdateDeploymentRequest, + config.ListResourcesRequest, dict, ], ) -def test_update_deployment_rest(request_type): +def test_list_resources_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6093,150 +9014,40 @@ def test_update_deployment_rest(request_type): # send a request that will satisfy transcoding request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request_init["deployment"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "projects/sample1/locations/sample2/deployments/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "latest_revision": "latest_revision_value", - "state_detail": "state_detail_value", - "error_code": 1, - "delete_results": { - "content": "content_value", - "artifacts": "artifacts_value", - "outputs": {}, - }, - "delete_build": "delete_build_value", - "delete_logs": "delete_logs_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": { - "code": 411, - "message": 
"message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - } - ], - "error_logs": "error_logs_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "service_account": "service_account_value", - "import_existing_resources": True, - "worker_pool": "worker_pool_value", - "lock_state": 1, + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = config.UpdateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del 
request_init["deployment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_deployment(request) + response = client.list_resources(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_deployment_rest_required_fields( - request_type=config.UpdateDeploymentRequest, -): +def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): transport_class = transports.ConfigRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6251,24 +9062,30 @@ def test_update_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now 
present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6277,7 +9094,7 @@ def test_update_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourcesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6289,45 +9106,49 @@ def test_update_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_deployment(request) + response = client.list_resources(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_deployment_rest_unset_required_fields(): +def test_list_resources_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_deployment._get_unset_required_fields({}) + unset_fields = transport.list_resources._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - & set(("deployment",)) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deployment_rest_interceptors(null_interceptor): +def test_list_resources_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -6338,15 +9159,13 @@ def test_update_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_update_deployment" + transports.ConfigRestInterceptor, "post_list_resources" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_update_deployment" + transports.ConfigRestInterceptor, "pre_list_resources" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6357,19 +9176,19 @@ def test_update_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = config.ListResourcesResponse.to_json( + config.ListResourcesResponse() ) - request = config.UpdateDeploymentRequest() + request = config.ListResourcesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.ListResourcesResponse() - client.update_deployment( + client.list_resources( request, metadata=[ ("key", "val"), @@ -6381,8 +9200,8 @@ def test_update_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_deployment_rest_bad_request( - transport: str = "rest", request_type=config.UpdateDeploymentRequest +def test_list_resources_rest_bad_request( + transport: str = "rest", request_type=config.ListResourcesRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6391,7 +9210,7 @@ def test_update_deployment_rest_bad_request( # send a request that will satisfy transcoding request_init = { - 
"deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } request = request_type(**request_init) @@ -6404,10 +9223,10 @@ def test_update_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_deployment(request) + client.list_resources(request) -def test_update_deployment_rest_flattened(): +def test_list_resources_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6416,47 +9235,42 @@ def test_update_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourcesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } # get truthy value for each flattened field mock_args = dict( - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_deployment(**mock_args) + 
client.list_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" % client.transport._host, args[1], ) -def test_update_deployment_rest_flattened_error(transport: str = "rest"): +def test_list_resources_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6465,65 +9279,122 @@ def test_update_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_deployment( - config.UpdateDeploymentRequest(), - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_resources( + config.ListResourcesRequest(), + parent="parent_value", ) -def test_update_deployment_rest_error(): +def test_list_resources_rest_pager(transport: str = "rest"): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListResourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + + pager = client.list_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Resource) for i in results) + + pages = list(client.list_resources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - config.DeleteDeploymentRequest, + config.ExportDeploymentStatefileRequest, dict, ], ) -def test_delete_deployment_rest(request_type): +def test_export_deployment_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_deployment(request) + response = client.export_deployment_statefile(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" -def test_delete_deployment_rest_required_fields( - request_type=config.DeleteDeploymentRequest, +def test_export_deployment_statefile_rest_required_fields( + request_type=config.ExportDeploymentStatefileRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6538,29 +9409,21 @@ def test_delete_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" 
unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "delete_policy", - "force", - "request_id", - ) - ) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6569,7 +9432,7 @@ def test_delete_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Statefile() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6581,45 +9444,40 @@ def test_delete_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_deployment(request) + response = client.export_deployment_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_deployment_rest_unset_required_fields(): +def test_export_deployment_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deletePolicy", - "force", - "requestId", - ) - ) - & set(("name",)) - ) + unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deployment_rest_interceptors(null_interceptor): +def test_export_deployment_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -6630,15 +9488,15 @@ def 
test_delete_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_delete_deployment" + transports.ConfigRestInterceptor, "post_export_deployment_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_deployment" + transports.ConfigRestInterceptor, "pre_export_deployment_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + pb_message = config.ExportDeploymentStatefileRequest.pb( + config.ExportDeploymentStatefileRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6649,19 +9507,17 @@ def test_delete_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = config.Statefile.to_json(config.Statefile()) - request = config.DeleteDeploymentRequest() + request = config.ExportDeploymentStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.Statefile() - client.delete_deployment( + client.export_deployment_statefile( request, metadata=[ ("key", "val"), @@ -6673,8 +9529,8 @@ def test_delete_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_deployment_rest_bad_request( - transport: str = "rest", request_type=config.DeleteDeploymentRequest +def test_export_deployment_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ExportDeploymentStatefileRequest ): client = ConfigClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -6682,7 +9538,7 @@ def test_delete_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6694,67 +9550,10 @@ def test_delete_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_deployment(request) - - -def test_delete_deployment_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deployment( - config.DeleteDeploymentRequest(), - name="name_value", - ) + client.export_deployment_statefile(request) -def test_delete_deployment_rest_error(): +def test_export_deployment_statefile_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6763,46 +9562,48 @@ def test_delete_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ListRevisionsRequest, + config.ExportRevisionStatefileRequest, dict, ], ) -def test_list_revisions_rest(request_type): +def test_export_revision_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListRevisionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = config.Statefile( + signed_uri="signed_uri_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_revisions(request) + response = client.export_revision_statefile(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRevisionsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" -def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): +def test_export_revision_statefile_rest_required_fields( + request_type=config.ExportRevisionStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} @@ -6821,7 +9622,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) + ).export_revision_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6830,16 +9631,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) - # Check 
that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).export_revision_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6853,7 +9645,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6865,49 +9657,40 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_revisions(request) + response = client.export_revision_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_revisions_rest_unset_required_fields(): +def test_export_revision_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_revisions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_revisions_rest_interceptors(null_interceptor): +def test_export_revision_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -6918,13 +9701,15 @@ def test_list_revisions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_revisions" + transports.ConfigRestInterceptor, "post_export_revision_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_revisions" + transports.ConfigRestInterceptor, "pre_export_revision_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) + pb_message = config.ExportRevisionStatefileRequest.pb( + config.ExportRevisionStatefileRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6935,19 +9720,17 @@ def test_list_revisions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.ListRevisionsResponse.to_json( - config.ListRevisionsResponse() - ) + req.return_value._content = config.Statefile.to_json(config.Statefile()) - request = config.ListRevisionsRequest() + request = config.ExportRevisionStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = 
request, metadata - post.return_value = config.ListRevisionsResponse() + post.return_value = config.Statefile() - client.list_revisions( + client.export_revision_statefile( request, metadata=[ ("key", "val"), @@ -6959,8 +9742,8 @@ def test_list_revisions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_revisions_rest_bad_request( - transport: str = "rest", request_type=config.ListRevisionsRequest +def test_export_revision_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ExportRevisionStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6968,7 +9751,9 @@ def test_list_revisions_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6980,200 +9765,63 @@ def test_list_revisions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_revisions(request) - - -def test_list_revisions_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = config.ListRevisionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_revisions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" - % client.transport._host, - args[1], - ) - - -def test_list_revisions_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_revisions( - config.ListRevisionsRequest(), - parent="parent_value", - ) + client.export_revision_statefile(request) -def test_list_revisions_rest_pager(transport: str = "rest"): +def test_export_revision_statefile_rest_error(): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - config.Revision(), - config.Revision(), - ], - next_page_token="abc", - ), - config.ListRevisionsResponse( - revisions=[], - next_page_token="def", - ), - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - ], - next_page_token="ghi", - ), - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - config.Revision(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - - pager = client.list_revisions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Revision) for i in results) - - pages = list(client.list_revisions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - config.GetRevisionRequest, + config.ImportStatefileRequest, dict, ], ) -def test_get_revision_rest(request_type): +def test_import_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Revision( - name="name_value", - action=config.Revision.Action.CREATE, - state=config.Revision.State.APPLYING, - state_detail="state_detail_value", - error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, - build="build_value", - logs="logs_value", - error_logs="error_logs_value", - service_account="service_account_value", - import_existing_resources=True, - worker_pool="worker_pool_value", + return_value = config.Statefile( + signed_uri="signed_uri_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_revision(request) + response = client.import_statefile(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Revision) - assert response.name == "name_value" - assert response.action == config.Revision.Action.CREATE - assert response.state == config.Revision.State.APPLYING - assert response.state_detail == "state_detail_value" - assert ( - response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED - ) - assert response.build == "build_value" - assert response.logs == "logs_value" - assert response.error_logs == "error_logs_value" - assert response.service_account == "service_account_value" - assert response.import_existing_resources is True - assert response.worker_pool == "worker_pool_value" + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" -def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): +def test_import_statefile_rest_required_fields( + request_type=config.ImportStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7188,21 +9836,24 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify 
required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7211,7 +9862,7 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Revision() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7223,39 +9874,48 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_revision(request) + response = client.import_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_revision_rest_unset_required_fields(): +def test_import_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_revision._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "lockId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_revision_rest_interceptors(null_interceptor): +def test_import_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -7266,13 +9926,13 @@ def test_get_revision_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_revision" + transports.ConfigRestInterceptor, "post_import_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_revision" + transports.ConfigRestInterceptor, "pre_import_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) + pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7283,17 +9943,17 @@ def test_get_revision_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Revision.to_json(config.Revision()) + req.return_value._content = config.Statefile.to_json(config.Statefile()) - request = config.GetRevisionRequest() + request = config.ImportStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Revision() + post.return_value = config.Statefile() - client.get_revision( + client.import_statefile( 
request, metadata=[ ("key", "val"), @@ -7305,8 +9965,8 @@ def test_get_revision_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_revision_rest_bad_request( - transport: str = "rest", request_type=config.GetRevisionRequest +def test_import_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ImportStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7314,9 +9974,7 @@ def test_get_revision_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7328,10 +9986,10 @@ def test_get_revision_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_revision(request) + client.import_statefile(request) -def test_get_revision_rest_flattened(): +def test_import_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7340,16 +9998,17 @@ def test_get_revision_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Revision() + return_value = config.Statefile() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + "parent": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + lock_id=725, ) mock_args.update(sample_request) @@ -7357,25 +10016,25 @@ def test_get_revision_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_revision(**mock_args) + client.import_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" + "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" % client.transport._host, args[1], ) -def test_get_revision_rest_flattened_error(transport: str = "rest"): +def test_import_statefile_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7384,13 +10043,14 @@ def test_get_revision_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_revision( - config.GetRevisionRequest(), - name="name_value", + client.import_statefile( + config.ImportStatefileRequest(), + parent="parent_value", + lock_id=725, ) -def test_get_revision_rest_error(): +def test_import_statefile_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7399,54 +10059,46 @@ def test_get_revision_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.GetResourceRequest, + config.DeleteStatefileRequest, dict, ], ) -def test_get_resource_rest(request_type): +def test_delete_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Resource( - name="name_value", - intent=config.Resource.Intent.CREATE, - state=config.Resource.State.PLANNED, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_resource(request) + response = client.delete_statefile(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Resource) - assert response.name == "name_value" - assert response.intent == config.Resource.Intent.CREATE - assert response.state == config.Resource.State.PLANNED + assert response is None -def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): +def test_delete_statefile_rest_required_fields( + request_type=config.DeleteStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7461,21 +10113,24 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) + ).delete_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) + ).delete_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7484,7 +10139,7 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = config.Resource() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7496,39 +10151,45 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_resource(request) + response = client.delete_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_rest_unset_required_fields(): +def test_delete_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_resource_rest_interceptors(null_interceptor): +def test_delete_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else 
transports.ConfigRestInterceptor(), @@ -7539,13 +10200,10 @@ def test_get_resource_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource" - ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_resource" + transports.ConfigRestInterceptor, "pre_delete_statefile" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) + pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7556,17 +10214,15 @@ def test_get_resource_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Resource.to_json(config.Resource()) - request = config.GetResourceRequest() + request = config.DeleteStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Resource() - client.get_resource( + client.delete_statefile( request, metadata=[ ("key", "val"), @@ -7575,11 +10231,10 @@ def test_get_resource_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_resource_rest_bad_request( - transport: str = "rest", request_type=config.GetResourceRequest +def test_delete_statefile_rest_bad_request( + transport: str = "rest", request_type=config.DeleteStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7587,9 +10242,7 @@ def test_get_resource_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + request_init = {"name": 
"projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7601,10 +10254,10 @@ def test_get_resource_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_resource(request) + client.delete_statefile(request) -def test_get_resource_rest_flattened(): +def test_delete_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7613,11 +10266,11 @@ def test_get_resource_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Resource() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field @@ -7629,26 +10282,24 @@ def test_get_resource_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_resource(**mock_args) + client.delete_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" % client.transport._host, args[1], ) -def test_get_resource_rest_flattened_error(transport: str = "rest"): +def test_delete_statefile_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7657,13 +10308,13 @@ def test_get_resource_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_resource( - config.GetResourceRequest(), + client.delete_statefile( + config.DeleteStatefileRequest(), name="name_value", ) -def test_get_resource_rest_error(): +def test_delete_statefile_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7672,52 +10323,45 @@ def test_get_resource_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ListResourcesRequest, + config.LockDeploymentRequest, dict, ], ) -def test_list_resources_rest(request_type): +def test_lock_deployment_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListResourcesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_resources(request) + response = client.lock_deployment(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListResourcesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): +def test_lock_deployment_rest_required_fields( + request_type=config.LockDeploymentRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7732,30 +10376,21 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).list_resources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7764,7 +10399,7 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7776,49 +10411,37 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_resources(request) + response = client.lock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_resources_rest_unset_required_fields(): +def test_lock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.lock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_resources_rest_interceptors(null_interceptor): +def test_lock_deployment_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -7829,13 +10452,15 @@ def test_list_resources_rest_interceptors(null_interceptor): ) as 
req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resources" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_lock_deployment" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_resources" + transports.ConfigRestInterceptor, "pre_lock_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) + pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7846,19 +10471,19 @@ def test_list_resources_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.ListResourcesResponse.to_json( - config.ListResourcesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = config.ListResourcesRequest() + request = config.LockDeploymentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListResourcesResponse() + post.return_value = operations_pb2.Operation() - client.list_resources( + client.lock_deployment( request, metadata=[ ("key", "val"), @@ -7870,8 +10495,8 @@ def test_list_resources_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_resources_rest_bad_request( - transport: str = "rest", request_type=config.ListResourcesRequest +def test_lock_deployment_rest_bad_request( + transport: str = "rest", request_type=config.LockDeploymentRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7879,9 +10504,7 @@ def test_list_resources_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - 
"parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7893,10 +10516,10 @@ def test_list_resources_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_resources(request) + client.lock_deployment(request) -def test_list_resources_rest_flattened(): +def test_lock_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7905,42 +10528,40 @@ def test_list_resources_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_resources(**mock_args) + client.lock_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" + "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" % client.transport._host, args[1], ) -def test_list_resources_rest_flattened_error(transport: str = "rest"): +def test_lock_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7949,122 +10570,61 @@ def test_list_resources_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_resources( - config.ListResourcesRequest(), - parent="parent_value", + client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", ) -def test_list_resources_rest_pager(transport: str = "rest"): +def test_lock_deployment_rest_error(): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListResourcesResponse( - resources=[ - config.Resource(), - config.Resource(), - config.Resource(), - ], - next_page_token="abc", - ), - config.ListResourcesResponse( - resources=[], - next_page_token="def", - ), - config.ListResourcesResponse( - resources=[ - config.Resource(), - ], - next_page_token="ghi", - ), - config.ListResourcesResponse( - resources=[ - config.Resource(), - config.Resource(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListResourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } - - pager = client.list_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Resource) for i in results) - - pages = list(client.list_resources(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - config.ExportDeploymentStatefileRequest, + config.UnlockDeploymentRequest, dict, ], ) -def test_export_deployment_statefile_rest(request_type): +def test_unlock_deployment_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile( - signed_uri="signed_uri_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_deployment_statefile(request) + response = client.unlock_deployment(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert response.operation.name == "operations/spam" -def test_export_deployment_statefile_rest_required_fields( - request_type=config.ExportDeploymentStatefileRequest, +def test_unlock_deployment_rest_required_fields( + request_type=config.UnlockDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8079,21 +10639,24 @@ def test_export_deployment_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_deployment_statefile._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + 
jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_deployment_statefile._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8102,7 +10665,7 @@ def test_export_deployment_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8122,32 +10685,37 @@ def test_export_deployment_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_deployment_statefile(request) + response = client.unlock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_deployment_statefile_rest_unset_required_fields(): +def test_unlock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.unlock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_deployment_statefile_rest_interceptors(null_interceptor): +def test_unlock_deployment_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8158,15 +10726,15 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_deployment_statefile" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.ConfigRestInterceptor, "post_unlock_deployment" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_deployment_statefile" + transports.ConfigRestInterceptor, "pre_unlock_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ExportDeploymentStatefileRequest.pb( - config.ExportDeploymentStatefileRequest() - ) + pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8177,17 +10745,19 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Statefile.to_json(config.Statefile()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = config.ExportDeploymentStatefileRequest() + request = config.UnlockDeploymentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() + post.return_value = operations_pb2.Operation() - client.export_deployment_statefile( + client.unlock_deployment( request, metadata=[ ("key", "val"), @@ -8199,8 +10769,8 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_deployment_statefile_rest_bad_request( - transport: str = "rest", request_type=config.ExportDeploymentStatefileRequest +def test_unlock_deployment_rest_bad_request( + transport: str = "rest", request_type=config.UnlockDeploymentRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8208,22 +10778,81 @@ def test_export_deployment_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": 
"projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_deployment_statefile(request) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unlock_deployment(request) + + +def test_unlock_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + lock_id=725, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.unlock_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" + % client.transport._host, + args[1], + ) + + +def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.unlock_deployment( + config.UnlockDeploymentRequest(), + name="name_value", + lock_id=725, + ) -def test_export_deployment_statefile_rest_error(): +def test_unlock_deployment_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8232,52 +10861,58 @@ def test_export_deployment_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ExportRevisionStatefileRequest, + config.ExportLockInfoRequest, dict, ], ) -def test_export_revision_statefile_rest(request_type): +def test_export_lock_info_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile( - signed_uri="signed_uri_value", + return_value = config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_revision_statefile(request) + response = client.export_lock_info(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert isinstance(response, config.LockInfo) + assert response.lock_id == 725 + assert response.operation == "operation_value" + assert response.info == "info_value" + assert response.who == "who_value" + assert response.version == "version_value" -def test_export_revision_statefile_rest_required_fields( - request_type=config.ExportRevisionStatefileRequest, +def test_export_lock_info_rest_required_fields( + request_type=config.ExportLockInfoRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8292,21 +10927,21 @@ def test_export_revision_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8315,7 +10950,7 @@ def test_export_revision_statefile_rest_required_fields( request = request_type(**request_init) # 
Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = config.LockInfo() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8327,40 +10962,39 @@ def test_export_revision_statefile_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_revision_statefile(request) + response = client.export_lock_info(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_revision_statefile_rest_unset_required_fields(): +def test_export_lock_info_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.export_lock_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_revision_statefile_rest_interceptors(null_interceptor): +def test_export_lock_info_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8371,15 +11005,13 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_revision_statefile" + transports.ConfigRestInterceptor, "post_export_lock_info" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_revision_statefile" + transports.ConfigRestInterceptor, "pre_export_lock_info" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ExportRevisionStatefileRequest.pb( - config.ExportRevisionStatefileRequest() - ) + pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8390,17 +11022,17 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Statefile.to_json(config.Statefile()) + req.return_value._content = config.LockInfo.to_json(config.LockInfo()) - request = config.ExportRevisionStatefileRequest() + request = config.ExportLockInfoRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() + post.return_value = config.LockInfo() - client.export_revision_statefile( + client.export_lock_info( request, metadata=[ ("key", "val"), @@ -8412,8 +11044,8 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_revision_statefile_rest_bad_request( - transport: str = "rest", request_type=config.ExportRevisionStatefileRequest +def test_export_lock_info_rest_bad_request( + transport: str = "rest", request_type=config.ExportLockInfoRequest ): 
client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8421,9 +11053,7 @@ def test_export_revision_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8435,10 +11065,69 @@ def test_export_revision_statefile_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_revision_statefile(request) + client.export_lock_info(request) -def test_export_revision_statefile_rest_error(): +def test_export_lock_info_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.LockInfo() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.LockInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_lock_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" + % client.transport._host, + args[1], + ) + + +def test_export_lock_info_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_lock_info( + config.ExportLockInfoRequest(), + name="name_value", + ) + + +def test_export_lock_info_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8447,51 +11136,156 @@ def test_export_revision_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ImportStatefileRequest, + config.CreatePreviewRequest, dict, ], ) -def test_import_statefile_rest(request_type): +def test_create_preview_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["preview"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "state": 1, + "deployment": "deployment_value", + "preview_mode": 1, + "service_account": "service_account_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "worker_pool": "worker_pool_value", + "error_code": 1, + "error_status": { + "code": 411, + "message": "message_value", + "details": [ 
+ { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "build": "build_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": {}, + } + ], + "error_logs": "error_logs_value", + "preview_artifacts": { + "content": "content_value", + "artifacts": "artifacts_value", + }, + "logs": "logs_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.CreatePreviewRequest.meta.fields["preview"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["preview"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["preview"][field])): + del request_init["preview"][field][i][subfield] + else: + del 
request_init["preview"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile( - signed_uri="signed_uri_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_statefile(request) + response = client.create_preview(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert response.operation.name == "operations/spam" -def test_import_statefile_rest_required_fields( - request_type=config.ImportStatefileRequest, -): +def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["parent"] = "" - request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8506,24 +11300,28 @@ def test_import_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["lockId"] = 725 unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "preview_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8532,7 +11330,7 @@ def test_import_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8552,40 +11350,42 @@ def test_import_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_statefile(request) + response = client.create_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_statefile_rest_unset_required_fields(): +def test_create_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_statefile._get_unset_required_fields({}) + unset_fields = transport.create_preview._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "previewId", + "requestId", + ) + ) & set( ( "parent", - "lockId", + "preview", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_statefile_rest_interceptors(null_interceptor): +def test_create_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8596,13 +11396,15 @@ def test_import_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_import_statefile" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_create_preview" ) as post, mock.patch.object( - 
transports.ConfigRestInterceptor, "pre_import_statefile" + transports.ConfigRestInterceptor, "pre_create_preview" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) + pb_message = config.CreatePreviewRequest.pb(config.CreatePreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8613,17 +11415,19 @@ def test_import_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Statefile.to_json(config.Statefile()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = config.ImportStatefileRequest() + request = config.CreatePreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() + post.return_value = operations_pb2.Operation() - client.import_statefile( + client.create_preview( request, metadata=[ ("key", "val"), @@ -8635,8 +11439,8 @@ def test_import_statefile_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_statefile_rest_bad_request( - transport: str = "rest", request_type=config.ImportStatefileRequest +def test_create_preview_rest_bad_request( + transport: str = "rest", request_type=config.CreatePreviewRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8644,7 +11448,7 @@ def test_import_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8656,10 +11460,10 @@ def test_import_statefile_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_statefile(request) + client.create_preview(request) -def test_import_statefile_rest_flattened(): +def test_create_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8668,43 +11472,42 @@ def test_import_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - lock_id=725, + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.import_statefile(**mock_args) + client.create_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, args[1], ) -def test_import_statefile_rest_flattened_error(transport: str = "rest"): +def test_create_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8713,14 +11516,18 @@ def test_import_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.import_statefile( - config.ImportStatefileRequest(), + client.create_preview( + config.CreatePreviewRequest(), parent="parent_value", - lock_id=725, + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) -def test_import_statefile_rest_error(): +def test_create_preview_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8729,46 +11536,68 @@ def test_import_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.DeleteStatefileRequest, + config.GetPreviewRequest, dict, ], ) -def test_delete_statefile_rest(request_type): +def test_get_preview_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_statefile(request) + response = client.get_preview(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" -def test_delete_statefile_rest_required_fields( - request_type=config.DeleteStatefileRequest, -): +def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" - request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8783,24 +11612,21 @@ def test_delete_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).get_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).get_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "lockId" in 
jsonified_request - assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8809,7 +11635,7 @@ def test_delete_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = config.Preview() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8821,45 +11647,39 @@ def test_delete_statefile_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_statefile(request) + response = client.get_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_statefile_rest_unset_required_fields(): +def test_get_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_statefile._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "lockId", - ) - ) - ) + unset_fields = transport.get_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_delete_statefile_rest_interceptors(null_interceptor): +def test_get_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8870,10 +11690,13 @@ def test_delete_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_statefile" + transports.ConfigRestInterceptor, "post_get_preview" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_preview" ) as pre: pre.assert_not_called() - pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) + post.assert_not_called() + pb_message = config.GetPreviewRequest.pb(config.GetPreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8884,15 +11707,17 @@ def test_delete_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = config.Preview.to_json(config.Preview()) - request = config.DeleteStatefileRequest() + request = config.GetPreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = config.Preview() - client.delete_statefile( + client.get_preview( request, metadata=[ ("key", "val"), @@ -8901,10 +11726,11 @@ def test_delete_statefile_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_statefile_rest_bad_request( - transport: str = "rest", request_type=config.DeleteStatefileRequest +def test_get_preview_rest_bad_request( + transport: str = "rest", request_type=config.GetPreviewRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8912,7 +11738,7 @@ def 
test_delete_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8924,10 +11750,10 @@ def test_delete_statefile_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_statefile(request) + client.get_preview(request) -def test_delete_statefile_rest_flattened(): +def test_get_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8936,12 +11762,10 @@ def test_delete_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = config.Preview() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -8952,24 +11776,25 @@ def test_delete_statefile_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_statefile(**mock_args) + client.get_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, args[1], ) -def test_delete_statefile_rest_flattened_error(transport: str = "rest"): +def test_get_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8978,13 +11803,13 @@ def test_delete_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_statefile( - config.DeleteStatefileRequest(), + client.get_preview( + config.GetPreviewRequest(), name="name_value", ) -def test_delete_statefile_rest_error(): +def test_get_preview_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8993,45 +11818,50 @@ def test_delete_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.LockDeploymentRequest, + config.ListPreviewsRequest, dict, ], ) -def test_lock_deployment_rest(request_type): +def test_list_previews_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.lock_deployment(request) + response = client.list_previews(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListPreviewsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_lock_deployment_rest_required_fields( - request_type=config.LockDeploymentRequest, -): +def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequest): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9046,21 +11876,30 @@ def test_lock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).list_previews._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).list_previews._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9069,7 +11908,7 @@ def test_lock_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9081,37 +11920,49 @@ def test_lock_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.lock_deployment(request) + response = client.list_previews(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_lock_deployment_rest_unset_required_fields(): +def test_list_previews_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.lock_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_previews._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lock_deployment_rest_interceptors(null_interceptor): +def test_list_previews_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -9122,15 +11973,13 @@ def test_lock_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_lock_deployment" + transports.ConfigRestInterceptor, "post_list_previews" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_lock_deployment" + transports.ConfigRestInterceptor, "pre_list_previews" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) + pb_message = config.ListPreviewsRequest.pb(config.ListPreviewsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9141,19 +11990,19 @@ def test_lock_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = config.ListPreviewsResponse.to_json( + config.ListPreviewsResponse() ) - request = config.LockDeploymentRequest() + request = config.ListPreviewsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), 
] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.ListPreviewsResponse() - client.lock_deployment( + client.list_previews( request, metadata=[ ("key", "val"), @@ -9165,8 +12014,8 @@ def test_lock_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_lock_deployment_rest_bad_request( - transport: str = "rest", request_type=config.LockDeploymentRequest +def test_list_previews_rest_bad_request( + transport: str = "rest", request_type=config.ListPreviewsRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9174,7 +12023,7 @@ def test_lock_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9186,10 +12035,10 @@ def test_lock_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.lock_deployment(request) + client.list_previews(request) -def test_lock_deployment_rest_flattened(): +def test_list_previews_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9198,40 +12047,39 @@ def test_lock_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.lock_deployment(**mock_args) + client.list_previews(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, args[1], ) -def test_lock_deployment_rest_flattened_error(transport: str = "rest"): +def test_list_previews_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9240,33 +12088,88 @@ def test_lock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.lock_deployment( - config.LockDeploymentRequest(), - name="name_value", + client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", ) -def test_lock_deployment_rest_error(): +def test_list_previews_rest_pager(transport: str = "rest"): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListPreviewsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_previews(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Preview) for i in results) + + pages = list(client.list_previews(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): 
+ assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - config.UnlockDeploymentRequest, + config.DeletePreviewRequest, dict, ], ) -def test_unlock_deployment_rest(request_type): +def test_delete_preview_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -9281,20 +12184,17 @@ def test_unlock_deployment_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.unlock_deployment(request) + response = client.delete_preview(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_unlock_deployment_rest_required_fields( - request_type=config.UnlockDeploymentRequest, -): +def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" - request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9309,24 +12209,23 @@ def test_unlock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).delete_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).delete_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9347,10 +12246,9 @@ def test_unlock_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -9360,32 +12258,24 @@ def test_unlock_deployment_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.unlock_deployment(request) + response = client.delete_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_unlock_deployment_rest_unset_required_fields(): +def test_delete_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.unlock_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "lockId", - ) - ) - ) + unset_fields = transport.delete_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_unlock_deployment_rest_interceptors(null_interceptor): +def test_delete_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -9398,13 +12288,13 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ConfigRestInterceptor, "post_unlock_deployment" + transports.ConfigRestInterceptor, "post_delete_preview" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_unlock_deployment" + transports.ConfigRestInterceptor, "pre_delete_preview" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) + pb_message = config.DeletePreviewRequest.pb(config.DeletePreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9419,7 +12309,7 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = config.UnlockDeploymentRequest() + request = config.DeletePreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9427,7 +12317,7 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.unlock_deployment( + client.delete_preview( request, metadata=[ ("key", "val"), @@ -9439,8 +12329,8 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_unlock_deployment_rest_bad_request( - transport: str = "rest", request_type=config.UnlockDeploymentRequest +def test_delete_preview_rest_bad_request( + transport: str = "rest", request_type=config.DeletePreviewRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9448,7 +12338,7 @@ def 
test_unlock_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9460,10 +12350,10 @@ def test_unlock_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.unlock_deployment(request) + client.delete_preview(request) -def test_unlock_deployment_rest_flattened(): +def test_delete_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9475,14 +12365,11 @@ def test_unlock_deployment_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} # get truthy value for each flattened field mock_args = dict( name="name_value", - lock_id=725, ) mock_args.update(sample_request) @@ -9493,20 +12380,19 @@ def test_unlock_deployment_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.unlock_deployment(**mock_args) + client.delete_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, args[1], ) -def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): +def test_delete_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9515,14 +12401,13 @@ def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.unlock_deployment( - config.UnlockDeploymentRequest(), + client.delete_preview( + config.DeletePreviewRequest(), name="name_value", - lock_id=725, ) -def test_unlock_deployment_rest_error(): +def test_delete_preview_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9531,58 +12416,47 @@ def test_unlock_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ExportLockInfoRequest, + config.ExportPreviewResultRequest, dict, ], ) -def test_export_lock_info_rest(request_type): +def test_export_preview_result_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.LockInfo( - lock_id=725, - operation="operation_value", - info="info_value", - who="who_value", - version="version_value", - ) + return_value = config.ExportPreviewResultResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.ExportPreviewResultResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_lock_info(request) + response = client.export_preview_result(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.LockInfo) - assert response.lock_id == 725 - assert response.operation == "operation_value" - assert response.info == "info_value" - assert response.who == "who_value" - assert response.version == "version_value" + assert isinstance(response, config.ExportPreviewResultResponse) -def test_export_lock_info_rest_required_fields( - request_type=config.ExportLockInfoRequest, +def test_export_preview_result_rest_required_fields( + request_type=config.ExportPreviewResultRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9597,21 +12471,21 @@ def test_export_lock_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).export_preview_result._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = 
"parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).export_preview_result._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9620,7 +12494,7 @@ def test_export_lock_info_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.LockInfo() + return_value = config.ExportPreviewResultResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9632,39 +12506,40 @@ def test_export_lock_info_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.ExportPreviewResultResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_lock_info(request) + response = client.export_preview_result(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_export_lock_info_rest_unset_required_fields(): +def test_export_preview_result_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_lock_info._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.export_preview_result._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_lock_info_rest_interceptors(null_interceptor): +def test_export_preview_result_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -9675,13 +12550,15 @@ def test_export_lock_info_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_lock_info" + transports.ConfigRestInterceptor, "post_export_preview_result" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_lock_info" + transports.ConfigRestInterceptor, "pre_export_preview_result" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) + pb_message = config.ExportPreviewResultRequest.pb( + config.ExportPreviewResultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9692,17 +12569,19 @@ def test_export_lock_info_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.LockInfo.to_json(config.LockInfo()) + req.return_value._content = config.ExportPreviewResultResponse.to_json( + config.ExportPreviewResultResponse() + ) - request = 
config.ExportLockInfoRequest() + request = config.ExportPreviewResultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.LockInfo() + post.return_value = config.ExportPreviewResultResponse() - client.export_lock_info( + client.export_preview_result( request, metadata=[ ("key", "val"), @@ -9714,8 +12593,8 @@ def test_export_lock_info_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_lock_info_rest_bad_request( - transport: str = "rest", request_type=config.ExportLockInfoRequest +def test_export_preview_result_rest_bad_request( + transport: str = "rest", request_type=config.ExportPreviewResultRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9723,7 +12602,7 @@ def test_export_lock_info_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9735,69 +12614,10 @@ def test_export_lock_info_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_lock_info(request) - - -def test_export_lock_info_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = config.LockInfo() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.export_lock_info(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" - % client.transport._host, - args[1], - ) - - -def test_export_lock_info_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_lock_info( - config.ExportLockInfoRequest(), - name="name_value", - ) + client.export_preview_result(request) -def test_export_lock_info_rest_error(): +def test_export_preview_result_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9958,6 +12778,11 @@ def test_config_base_transport(): "lock_deployment", "unlock_deployment", "export_lock_info", + "create_preview", + "get_preview", + "list_previews", + "delete_preview", + "export_preview_result", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -10292,6 +13117,21 @@ def test_config_client_transport_session_collision(transport_name): session1 = client1.transport.export_lock_info._session session2 = client2.transport.export_lock_info._session assert session1 != session2 + session1 = client1.transport.create_preview._session + session2 = client2.transport.create_preview._session + assert session1 != session2 + session1 = client1.transport.get_preview._session + session2 = client2.transport.get_preview._session + assert session1 != session2 + session1 = client1.transport.list_previews._session + session2 = client2.transport.list_previews._session + assert session1 != session2 + session1 = client1.transport.delete_preview._session + session2 = client2.transport.delete_preview._session + assert session1 != session2 + session1 = client1.transport.export_preview_result._session + session2 = client2.transport.export_preview_result._session + assert session1 != session2 def test_config_grpc_transport_channel(): @@ -10474,12 +13314,38 @@ def test_parse_deployment_path(): assert expected == actual -def test_resource_path(): +def test_preview_path(): project = "cuttlefish" location = "mussel" - deployment = "winkle" - revision = "nautilus" - resource = "scallop" + preview = "winkle" + expected = "projects/{project}/locations/{location}/previews/{preview}".format( + project=project, + location=location, + 
preview=preview, + ) + actual = ConfigClient.preview_path(project, location, preview) + assert expected == actual + + +def test_parse_preview_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "preview": "abalone", + } + path = ConfigClient.preview_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_preview_path(path) + assert expected == actual + + +def test_resource_path(): + project = "squid" + location = "clam" + deployment = "whelk" + revision = "octopus" + resource = "oyster" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}".format( project=project, location=location, @@ -10495,11 +13361,11 @@ def test_resource_path(): def test_parse_resource_path(): expected = { - "project": "abalone", - "location": "squid", - "deployment": "clam", - "revision": "whelk", - "resource": "octopus", + "project": "nudibranch", + "location": "cuttlefish", + "deployment": "mussel", + "revision": "winkle", + "resource": "nautilus", } path = ConfigClient.resource_path(**expected) @@ -10509,10 +13375,10 @@ def test_parse_resource_path(): def test_revision_path(): - project = "oyster" - location = "nudibranch" - deployment = "cuttlefish" - revision = "mussel" + project = "scallop" + location = "abalone" + deployment = "squid" + revision = "clam" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}".format( project=project, location=location, @@ -10525,10 +13391,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "winkle", - "location": "nautilus", - "deployment": "scallop", - "revision": "abalone", + "project": "whelk", + "location": "octopus", + "deployment": "oyster", + "revision": "nudibranch", } path = ConfigClient.revision_path(**expected) @@ -10538,8 +13404,8 @@ def test_parse_revision_path(): def test_service_account_path(): - project = "squid" - 
service_account = "clam" + project = "cuttlefish" + service_account = "mussel" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -10550,8 +13416,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "whelk", - "service_account": "octopus", + "project": "winkle", + "service_account": "nautilus", } path = ConfigClient.service_account_path(**expected) @@ -10561,9 +13427,9 @@ def test_parse_service_account_path(): def test_worker_pool_path(): - project = "oyster" - location = "nudibranch" - worker_pool = "cuttlefish" + project = "scallop" + location = "abalone" + worker_pool = "squid" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -10577,9 +13443,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "mussel", - "location": "winkle", - "worker_pool": "nautilus", + "project": "clam", + "location": "whelk", + "worker_pool": "octopus", } path = ConfigClient.worker_pool_path(**expected) @@ -10589,7 +13455,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -10599,7 +13465,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nudibranch", } path = ConfigClient.common_billing_account_path(**expected) @@ -10609,7 +13475,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -10619,7 +13485,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "mussel", } path = 
ConfigClient.common_folder_path(**expected) @@ -10629,7 +13495,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -10639,7 +13505,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "nautilus", } path = ConfigClient.common_organization_path(**expected) @@ -10649,7 +13515,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -10659,7 +13525,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "abalone", } path = ConfigClient.common_project_path(**expected) @@ -10669,8 +13535,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -10681,8 +13547,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "whelk", + "location": "octopus", } path = ConfigClient.common_location_path(**expected) diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index 76a4d514ccfd..f27c694683eb 100644 --- a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.38.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.37.0...google-cloud-container-v2.38.0) (2024-01-22) + + +### Features + +* Add fields 
desired_in_transit_encryption_config and in_transit_encryption_config ([c25ed93](https://github.com/googleapis/google-cloud-python/commit/c25ed93f4b0ffcfad99818e47dfcaf1bafc7c851)) + + +### Documentation + +* Remove Not GA comments for GetOpenIDConfig and GetJSONWebKeys ([c25ed93](https://github.com/googleapis/google-cloud-python/commit/c25ed93f4b0ffcfad99818e47dfcaf1bafc7c851)) + +## [2.37.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.36.0...google-cloud-container-v2.37.0) (2024-01-04) + + +### Features + +* [google-cloud-container] Add autoscaled node pool upgrade strategy ([#12135](https://github.com/googleapis/google-cloud-python/issues/12135)) ([1729080](https://github.com/googleapis/google-cloud-python/commit/172908041f50a3c661cea23dca3932b037005e95)) + ## [2.36.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.35.0...google-cloud-container-v2.36.0) (2023-12-07) diff --git a/packages/google-cloud-container/google/cloud/container/__init__.py b/packages/google-cloud-container/google/cloud/container/__init__.py index b65c7490c983..b00fa3ce0c1d 100644 --- a/packages/google-cloud-container/google/cloud/container/__init__.py +++ b/packages/google-cloud-container/google/cloud/container/__init__.py @@ -88,6 +88,7 @@ IdentityServiceConfig, ILBSubsettingConfig, IntraNodeVisibilityConfig, + InTransitEncryptionConfig, IPAllocationPolicy, IPv6AccessType, Jwk, @@ -353,6 +354,7 @@ "WorkloadMetadataConfig", "WorkloadPolicyConfig", "DatapathProvider", + "InTransitEncryptionConfig", "IPv6AccessType", "NodePoolUpdateStrategy", "PrivateIPv6GoogleAccess", diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 1dd0f86909a6..5c4537eade37 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py 
@@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.36.0" # {x-release-please-version} +__version__ = "2.38.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/__init__.py index fa4646278ff2..eda505d0b39a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/__init__.py @@ -83,6 +83,7 @@ IdentityServiceConfig, ILBSubsettingConfig, IntraNodeVisibilityConfig, + InTransitEncryptionConfig, IPAllocationPolicy, IPv6AccessType, Jwk, @@ -253,6 +254,7 @@ "IPAllocationPolicy", "IPv6AccessType", "IdentityServiceConfig", + "InTransitEncryptionConfig", "IntraNodeVisibilityConfig", "Jwk", "K8sBetaAPIConfig", diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 1dd0f86909a6..5c4537eade37 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.36.0" # {x-release-please-version} +__version__ = "2.38.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py index b15def4013d9..55e7b9d406df 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py @@ -2589,8 +2589,6 @@ async def get_json_web_keys( ) -> cluster_service.GetJSONWebKeysResponse: r"""Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. .. code-block:: python diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py index b26f67ff07ba..1178ec87d158 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -2756,8 +2756,6 @@ def get_json_web_keys( ) -> cluster_service.GetJSONWebKeysResponse: r"""Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. .. 
code-block:: python diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py index a6d5cba5b624..331dc8043cb1 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py @@ -724,8 +724,6 @@ def get_json_web_keys( Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. Returns: Callable[[~.GetJSONWebKeysRequest], diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py index fc49c5efb387..f21edbb49f33 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py @@ -753,8 +753,6 @@ def get_json_web_keys( Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. 
Returns: Callable[[~.GetJSONWebKeysRequest], diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py index 235ab87c3357..a81b9c2e5769 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py @@ -77,6 +77,7 @@ IdentityServiceConfig, ILBSubsettingConfig, IntraNodeVisibilityConfig, + InTransitEncryptionConfig, IPAllocationPolicy, IPv6AccessType, Jwk, @@ -340,6 +341,7 @@ "WorkloadMetadataConfig", "WorkloadPolicyConfig", "DatapathProvider", + "InTransitEncryptionConfig", "IPv6AccessType", "NodePoolUpdateStrategy", "PrivateIPv6GoogleAccess", diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index f28e3ae34910..2719fa27281e 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -33,6 +33,7 @@ "NodePoolUpdateStrategy", "StackType", "IPv6AccessType", + "InTransitEncryptionConfig", "LinuxNodeConfig", "WindowsNodeConfig", "NodeKubeletConfig", @@ -311,6 +312,24 @@ class IPv6AccessType(proto.Enum): EXTERNAL = 2 +class InTransitEncryptionConfig(proto.Enum): + r"""Options for in-transit encryption. + + Values: + IN_TRANSIT_ENCRYPTION_CONFIG_UNSPECIFIED (0): + Unspecified, will be inferred as default - + IN_TRANSIT_ENCRYPTION_UNSPECIFIED. + IN_TRANSIT_ENCRYPTION_DISABLED (1): + In-transit encryption is disabled. + IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT (2): + Data in-transit is encrypted using inter-node + transparent encryption. 
+ """ + IN_TRANSIT_ENCRYPTION_CONFIG_UNSPECIFIED = 0 + IN_TRANSIT_ENCRYPTION_DISABLED = 1 + IN_TRANSIT_ENCRYPTION_INTER_NODE_TRANSPARENT = 2 + + class LinuxNodeConfig(proto.Message): r"""Parameters that can be configured on Linux nodes. @@ -3407,6 +3426,10 @@ class ClusterUpdate(proto.Message): to all auto-provisioned node pools in autopilot clusters and node auto-provisioning enabled clusters. + desired_in_transit_encryption_config (google.cloud.container_v1.types.InTransitEncryptionConfig): + Specify the details of in-transit encryption. + + This field is a member of `oneof`_ ``_desired_in_transit_encryption_config``. """ desired_node_version: str = proto.Field( @@ -3655,6 +3678,12 @@ class ClusterUpdate(proto.Message): message="ResourceManagerTags", ) ) + desired_in_transit_encryption_config: "InTransitEncryptionConfig" = proto.Field( + proto.ENUM, + number=137, + optional=True, + enum="InTransitEncryptionConfig", + ) class AdditionalPodRangesConfig(proto.Message): @@ -7482,6 +7511,10 @@ class NetworkConfig(proto.Message): this cluster. This field is a member of `oneof`_ ``_enable_fqdn_network_policy``. + in_transit_encryption_config (google.cloud.container_v1.types.InTransitEncryptionConfig): + Specify the details of in-transit encryption. + + This field is a member of `oneof`_ ``_in_transit_encryption_config``. 
""" class ClusterNetworkPerformanceConfig(proto.Message): @@ -7577,6 +7610,12 @@ class Tier(proto.Enum): number=19, optional=True, ) + in_transit_encryption_config: "InTransitEncryptionConfig" = proto.Field( + proto.ENUM, + number=20, + optional=True, + enum="InTransitEncryptionConfig", + ) class GatewayAPIConfig(proto.Message): diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 1dd0f86909a6..5c4537eade37 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.36.0" # {x-release-please-version} +__version__ = "2.38.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py index 23363f4e8e3b..36b422f16647 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py @@ -2505,8 +2505,6 @@ async def get_json_web_keys( ) -> cluster_service.GetJSONWebKeysResponse: r"""Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. .. 
code-block:: python diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py index 8649f3e449bb..9ea3f6fe55c7 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py @@ -2672,8 +2672,6 @@ def get_json_web_keys( ) -> cluster_service.GetJSONWebKeysResponse: r"""Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. .. code-block:: python diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py index 0fae52c969e2..a00ec6cc74db 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py @@ -724,8 +724,6 @@ def get_json_web_keys( Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. 
Returns: Callable[[~.GetJSONWebKeysRequest], diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py index 2276c5c22ab2..cd9801945510 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py @@ -753,8 +753,6 @@ def get_json_web_keys( Gets the public component of the cluster signing keys in JSON Web Key format. - This API is not yet intended for general use, and is not - available for all clusters. Returns: Callable[[~.GetJSONWebKeysRequest], diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py index 5fefba989dd0..38baa0cadba0 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -6349,12 +6349,22 @@ class GetNodePoolRequest(proto.Message): class BlueGreenSettings(proto.Message): r"""Settings for blue-green upgrade. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: standard_rollout_policy (google.cloud.container_v1beta1.types.BlueGreenSettings.StandardRolloutPolicy): Standard policy for the blue-green upgrade. + This field is a member of `oneof`_ ``rollout_policy``. 
+ autoscaled_rollout_policy (google.cloud.container_v1beta1.types.BlueGreenSettings.AutoscaledRolloutPolicy): + Autoscaled policy for cluster autoscaler + enabled blue-green upgrade. + This field is a member of `oneof`_ ``rollout_policy``. node_pool_soak_duration (google.protobuf.duration_pb2.Duration): Time needed after draining entire blue pool. @@ -6407,12 +6417,24 @@ class StandardRolloutPolicy(proto.Message): message=duration_pb2.Duration, ) + class AutoscaledRolloutPolicy(proto.Message): + r"""Autoscaled rollout policy uses cluster autoscaler during + blue-green upgrades to scale both the green and blue pools. + + """ + standard_rollout_policy: StandardRolloutPolicy = proto.Field( proto.MESSAGE, number=1, oneof="rollout_policy", message=StandardRolloutPolicy, ) + autoscaled_rollout_policy: AutoscaledRolloutPolicy = proto.Field( + proto.MESSAGE, + number=3, + oneof="rollout_policy", + message=AutoscaledRolloutPolicy, + ) node_pool_soak_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, diff --git a/packages/google-cloud-container/noxfile.py b/packages/google-cloud-container/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-container/noxfile.py +++ b/packages/google-cloud-container/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index 521594519dc4..5b1d35bcd392 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.36.0" + "version": "2.38.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index 512ca89c36fb..6d6a96135d2a 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.36.0" + "version": 
"2.38.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/CHANGELOG.md b/packages/google-cloud-datacatalog/CHANGELOG.md index 7249da85375b..ba14cb5daa95 100644 --- a/packages/google-cloud-datacatalog/CHANGELOG.md +++ b/packages/google-cloud-datacatalog/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## [3.17.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.17.1...google-cloud-datacatalog-v3.17.2) (2024-01-12) + + +### Documentation + +* [google-cloud-datacatalog] Change field behavior of the property "name" to IDENTIFIER for `PolicyTag` and `Taxonomy` ([#12163](https://github.com/googleapis/google-cloud-python/issues/12163)) ([1022ee8](https://github.com/googleapis/google-cloud-python/commit/1022ee8c42040c9660a22f4d40250964b4c4b37a)) + +## [3.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.17.0...google-cloud-datacatalog-v3.17.1) (2024-01-08) + + +### Documentation + +* [google-cloud-datacatalog] Change field behavior of the property "name" to IDENTIFIER for `PolicyTag` and `Taxonomy` ([#12161](https://github.com/googleapis/google-cloud-python/issues/12161)) ([46ea3b4](https://github.com/googleapis/google-cloud-python/commit/46ea3b4bac2ac0e18584e1686997fa632429d9ab)) + ## [3.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.16.0...google-cloud-datacatalog-v3.17.0) (2023-12-07) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index a1a26079aee2..8e711ca810d7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.17.0" # {x-release-please-version} +__version__ = "3.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index a1a26079aee2..8e711ca810d7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.17.0" # {x-release-please-version} +__version__ = "3.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py index d0fd1e9c0d45..b93294f0158a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -71,8 +71,8 @@ class Taxonomy(proto.Message): Attributes: name (str): - Output only. Resource name of this taxonomy - in URL format. + Identifier. Resource name of this taxonomy in + URL format. Note: Policy tag manager generates unique taxonomy IDs. display_name (str): @@ -191,7 +191,7 @@ class PolicyTag(proto.Message): Attributes: name (str): - Output only. Resource name of this policy tag + Identifier. Resource name of this policy tag in the URL format. The policy tag manager generates unique taxonomy IDs and policy tag IDs. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index a1a26079aee2..8e711ca810d7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.17.0" # {x-release-please-version} +__version__ = "3.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py index 3861cef6226d..c1fff8acc41a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py @@ -52,8 +52,7 @@ class Taxonomy(proto.Message): Attributes: name (str): - Output only. Resource name of this taxonomy, whose format - is: + Identifier. Resource name of this taxonomy, whose format is: "projects/{project_number}/locations/{location_id}/taxonomies/{id}". display_name (str): Required. User defined name of this taxonomy. @@ -165,7 +164,7 @@ class PolicyTag(proto.Message): Attributes: name (str): - Output only. Resource name of this policy tag, whose format + Identifier. Resource name of this policy tag, whose format is: "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{id}". 
display_name (str): diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index 5e9c717846aa..5ab22b639234 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.17.0" + "version": "3.17.2" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index c44b8429964a..fa75ff66dbd4 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.17.0" + "version": "3.17.2" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/CHANGELOG.md b/packages/google-cloud-dataplex/CHANGELOG.md index c64a0fbeda65..7eb4d59c8f8b 100644 --- a/packages/google-cloud-dataplex/CHANGELOG.md +++ b/packages/google-cloud-dataplex/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v1.10.0...google-cloud-dataplex-v1.11.0) (2024-01-04) + + +### Features + +* [google-cloud-dataplex] added enum value EventType.GOVERNANCE_RULE_PROCESSING ([#12132](https://github.com/googleapis/google-cloud-python/issues/12132)) 
([48d42fd](https://github.com/googleapis/google-cloud-python/commit/48d42fdffd8bc55346b7b560c9fdfe685b69930c)) + + +### Documentation + +* [google-cloud-dataplex] Fix the comment for `ignore_null` field to clarify its applicability on data quality rules ([#12141](https://github.com/googleapis/google-cloud-python/issues/12141)) ([ca71481](https://github.com/googleapis/google-cloud-python/commit/ca71481a3ddc9ec1bf4474c70e0512341a8adc9c)) + ## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v1.9.0...google-cloud-dataplex-v1.10.0) (2023-12-07) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index 0611530e449b..4316e0e5df1f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index 0611530e449b..4316e0e5df1f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index 086b1ab4ed73..cfc4351c5b8a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -437,7 +437,12 @@ class DataQualityRule(proto.Message): a rule, unless ``ignore_null`` is ``true``. In that case, such ``null`` rows are trivially considered passing. - This field is only valid for row-level type rules. + This field is only valid for the following type of rules: + + - RangeExpectation + - RegexExpectation + - SetExpectation + - UniquenessExpectation dimension (str): Required. The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index 4fdc60082e7b..9981dd07a9a6 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -620,6 +620,8 @@ class EventType(proto.Enum): Rule processing exceeds the allowed limit. GOVERNANCE_RULE_ERRORS (17): Rule processing errors. + GOVERNANCE_RULE_PROCESSING (18): + Governance rule prcoessing Event. 
""" EVENT_TYPE_UNSPECIFIED = 0 RESOURCE_IAM_POLICY_UPDATE = 1 @@ -637,6 +639,7 @@ class EventType(proto.Enum): GOVERNANCE_RULE_MATCHED_RESOURCES = 15 GOVERNANCE_RULE_SEARCH_LIMIT_EXCEEDS = 16 GOVERNANCE_RULE_ERRORS = 17 + GOVERNANCE_RULE_PROCESSING = 18 class Entity(proto.Message): r"""Information about Entity resource that the log event is diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index 00bf797319be..bea5cf536a42 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "1.10.0" + "version": "1.11.0" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/CHANGELOG.md b/packages/google-cloud-deploy/CHANGELOG.md index 04316816b82c..372d8abaa7e3 100644 --- a/packages/google-cloud-deploy/CHANGELOG.md +++ b/packages/google-cloud-deploy/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v1.15.0...google-cloud-deploy-v1.16.0) (2024-01-12) + + +### Features + +* Add stable cutback duration configuration to the k8s gateway service mesh deployment strategy ([e68b735](https://github.com/googleapis/google-cloud-python/commit/e68b73587d0944506f93425f9f09da1da4c220b3)) +* Updated logging protos with new fields ([e68b735](https://github.com/googleapis/google-cloud-python/commit/e68b73587d0944506f93425f9f09da1da4c220b3)) + + +### Documentation + +* Fixed a number of comments ([e68b735](https://github.com/googleapis/google-cloud-python/commit/e68b73587d0944506f93425f9f09da1da4c220b3)) + ## 
[1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v1.14.0...google-cloud-deploy-v1.15.0) (2023-12-07) diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index ae06408a02c4..725b83961de2 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index ae06408a02c4..725b83961de2 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 646456a565f4..a81680ba213a 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -692,9 +692,8 @@ async def sample_update_delivery_pipeline(): overwritten in the ``DeliveryPipeline`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field - will be overwritten if it is in the mask. 
If the user - does not provide a mask then all fields will be - overwritten. + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1443,8 +1442,8 @@ async def sample_update_target(): overwritten in the Target resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then all fields will be overwritten. + overwritten if it's in the mask. If the user doesn't + provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1831,8 +1830,8 @@ async def sample_get_custom_target_type(): A CustomTargetType defines a type of custom target that can be referenced in a Target in order to - facilitate deploying to a runtime that does not have - a 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ # Create or coerce a protobuf request object. @@ -1942,8 +1941,8 @@ async def sample_create_custom_target_type(): The request object. The request object for ``CreateCustomTargetType``. parent (:class:`str`): Required. The parent collection in which the - ``CustomTargetType`` should be created in. Format should - be ``projects/{project_id}/locations/{location_name}``. + ``CustomTargetType`` should be created. Format should be + ``projects/{project_id}/locations/{location_name}``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1974,8 +1973,8 @@ async def sample_create_custom_target_type(): A CustomTargetType defines a type of custom target that can be referenced in a Target in order to - facilitate deploying to a runtime that does not have - a 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ # Create or coerce a protobuf request object. @@ -2092,9 +2091,8 @@ async def sample_update_custom_target_type(): overwritten in the ``CustomTargetType`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2115,8 +2113,8 @@ async def sample_update_custom_target_type(): A CustomTargetType defines a type of custom target that can be referenced in a Target in order to - facilitate deploying to a runtime that does not have - a 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ # Create or coerce a protobuf request object. @@ -4351,9 +4349,8 @@ async def sample_update_automation(): overwritten in the ``Automation`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index d933789256a2..677f6abea710 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -1196,9 +1196,8 @@ def sample_update_delivery_pipeline(): overwritten in the ``DeliveryPipeline`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1929,8 +1928,8 @@ def sample_update_target(): overwritten in the Target resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then all fields will be overwritten. + overwritten if it's in the mask. If the user doesn't + provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2308,8 +2307,8 @@ def sample_get_custom_target_type(): A CustomTargetType defines a type of custom target that can be referenced in a Target in order to - facilitate deploying to a runtime that does not have - a 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ # Create or coerce a protobuf request object. 
@@ -2410,8 +2409,8 @@ def sample_create_custom_target_type(): The request object. The request object for ``CreateCustomTargetType``. parent (str): Required. The parent collection in which the - ``CustomTargetType`` should be created in. Format should - be ``projects/{project_id}/locations/{location_name}``. + ``CustomTargetType`` should be created. Format should be + ``projects/{project_id}/locations/{location_name}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2442,8 +2441,8 @@ def sample_create_custom_target_type(): A CustomTargetType defines a type of custom target that can be referenced in a Target in order to - facilitate deploying to a runtime that does not have - a 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ # Create or coerce a protobuf request object. @@ -2562,9 +2561,8 @@ def sample_update_custom_target_type(): overwritten in the ``CustomTargetType`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2585,8 +2583,8 @@ def sample_update_custom_target_type(): A CustomTargetType defines a type of custom target that can be referenced in a Target in order to - facilitate deploying to a runtime that does not have - a 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ # Create or coerce a protobuf request object. @@ -4762,9 +4760,8 @@ def sample_update_automation(): overwritten in the ``Automation`` resource by the update. 
The fields specified in the update_mask are relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. + will be overwritten if it's in the mask. If the user + doesn't provide a mask then all fields are overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index 554965c58bbe..f5edb0028a58 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -3397,8 +3397,8 @@ def __call__( A ``CustomTargetType`` defines a type of custom target that can be referenced in a ``Target`` in order to - facilitate deploying to a runtime that does not have a - 1P integration with Cloud Deploy. + facilitate deploying to other systems besides the + supported runtimes. """ diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index b2410864faea..a7a0190d4864 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -752,6 +752,13 @@ class GatewayServiceMesh(proto.Message): to propagate. The maximum configurable time is 3 hours, in seconds format. If unspecified, there is no wait time. + stable_cutback_duration (google.protobuf.duration_pb2.Duration): + Optional. The amount of time to migrate + traffic back from the canary Service to the + original Service during the stable phase + deployment. If specified, must be between 15s + and 3600s. 
If unspecified, there is no cutback + time. """ http_route: str = proto.Field( @@ -771,6 +778,11 @@ class GatewayServiceMesh(proto.Message): number=4, message=duration_pb2.Duration, ) + stable_cutback_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) class ServiceNetworking(proto.Message): r"""Information about the Kubernetes Service networking @@ -832,16 +844,16 @@ class CloudRunConfig(proto.Message): CustomCanaryDeployments. canary_revision_tags (MutableSequence[str]): Optional. A list of tags that are added to - the canary revision while the canary deployment - is in progress. + the canary revision while the canary phase is in + progress. prior_revision_tags (MutableSequence[str]): Optional. A list of tags that are added to - the prior revision while the canary deployment - is in progress. + the prior revision while the canary phase is in + progress. stable_revision_tags (MutableSequence[str]): Optional. A list of tags that are added to - the final stable revision after the canary - deployment is completed. + the final stable revision when the stable phase + is applied. """ automatic_traffic_control: bool = proto.Field( @@ -1125,10 +1137,10 @@ class CreateDeliveryPipelineRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -1180,17 +1192,17 @@ class UpdateDeliveryPipelineRequest(proto.Message): overwritten in the ``DeliveryPipeline`` resource by the update. 
The fields specified in the update_mask are relative to the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then all fields will be overwritten. + overwritten if it's in the mask. If the user doesn't provide + a mask then all fields are overwritten. delivery_pipeline (google.cloud.deploy_v1.types.DeliveryPipeline): Required. The ``DeliveryPipeline`` to update. request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -1249,10 +1261,10 @@ class DeleteDeliveryPipelineRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes after the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -1965,10 +1977,10 @@ class CreateTargetRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. 
The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -2019,18 +2031,18 @@ class UpdateTargetRequest(proto.Message): Required. Field mask is used to specify the fields to be overwritten in the Target resource by the update. The fields specified in the update_mask are relative to the resource, - not the full request. A field will be overwritten if it is - in the mask. If the user does not provide a mask then all - fields will be overwritten. + not the full request. A field will be overwritten if it's in + the mask. If the user doesn't provide a mask then all fields + are overwritten. target (google.cloud.deploy_v1.types.Target): Required. The ``Target`` to update. request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -2088,10 +2100,10 @@ class DeleteTargetRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes after the first request. 
+ retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -2145,8 +2157,8 @@ class CustomTargetType(proto.Message): r"""A ``CustomTargetType`` resource in the Cloud Deploy API. A ``CustomTargetType`` defines a type of custom target that can be - referenced in a ``Target`` in order to facilitate deploying to a - runtime that does not have a 1P integration with Cloud Deploy. + referenced in a ``Target`` in order to facilitate deploying to other + systems besides the supported runtimes. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -2485,7 +2497,7 @@ class CreateCustomTargetTypeRequest(proto.Message): Attributes: parent (str): Required. The parent collection in which the - ``CustomTargetType`` should be created in. Format should be + ``CustomTargetType`` should be created. Format should be ``projects/{project_id}/locations/{location_name}``. custom_target_type_id (str): Required. ID of the ``CustomTargetType``. @@ -2494,10 +2506,10 @@ class CreateCustomTargetTypeRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -2549,17 +2561,17 @@ class UpdateCustomTargetTypeRequest(proto.Message): overwritten in the ``CustomTargetType`` resource by the update. 
The fields specified in the update_mask are relative to the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then all fields will be overwritten. + overwritten if it's in the mask. If the user doesn't provide + a mask then all fields are overwritten. custom_target_type (google.cloud.deploy_v1.types.CustomTargetType): Required. The ``CustomTargetType`` to update. request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -2618,10 +2630,10 @@ class DeleteCustomTargetTypeRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes after the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -2672,11 +2684,7 @@ class DeleteCustomTargetTypeRequest(proto.Message): class TargetAttribute(proto.Message): - r"""Contains criteria for selecting Targets. Attributes provided - must match the target resource in order for policy restrictions - to apply. E.g. 
if id "prod" and labels "foo: bar" are given the - target resource must match both that id and have that label in - order to be selected. + r"""Contains criteria for selecting Targets. Attributes: id (str): @@ -3266,8 +3274,8 @@ class RenderMetadata(proto.Message): Output only. Metadata associated with rendering for Cloud Run. custom (google.cloud.deploy_v1.types.CustomMetadata): - Output only. Custom metadata provided by user - defined render operation. + Output only. Custom metadata provided by + user-defined render operation. """ cloud_run: "CloudRunRenderMetadata" = proto.Field( @@ -3396,10 +3404,10 @@ class CreateReleaseRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -3747,7 +3755,7 @@ class Metadata(proto.Message): contains the information about the interactions between Automation service and this rollout. custom (google.cloud.deploy_v1.types.CustomMetadata): - Output only. Custom metadata provided by user defined + Output only. Custom metadata provided by user-defined ``Rollout`` operations. """ @@ -3780,8 +3788,8 @@ class DeployJobRunMetadata(proto.Message): Output only. Custom Target metadata associated with a ``DeployJobRun``. custom (google.cloud.deploy_v1.types.CustomMetadata): - Output only. Custom metadata provided by user - defined deploy operation. + Output only. Custom metadata provided by + user-defined deploy operation. 
""" cloud_run: "CloudRunMetadata" = proto.Field( @@ -3887,13 +3895,13 @@ class AutomationRolloutMetadata(proto.Message): class CustomMetadata(proto.Message): - r"""CustomMetadata contains information from a user defined + r"""CustomMetadata contains information from a user-defined operation. Attributes: values (MutableMapping[str, str]): Output only. Key-value pairs provided by the - user defined operation. + user-defined operation. """ values: MutableMapping[str, str] = proto.MapField( @@ -4347,10 +4355,10 @@ class CreateRolloutRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -5792,10 +5800,10 @@ class CreateAutomationRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -5847,17 +5855,17 @@ class UpdateAutomationRequest(proto.Message): overwritten in the ``Automation`` resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. 
A field will be overwritten - if it is in the mask. If the user does not provide a mask - then all fields will be overwritten. + if it's in the mask. If the user doesn't provide a mask then + all fields are overwritten. automation (google.cloud.deploy_v1.types.Automation): Required. The ``Automation`` to update. request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes since the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If @@ -5916,10 +5924,10 @@ class DeleteAutomationRequest(proto.Message): request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. The server will guarantee that for at - least 60 minutes after the first request. + retry your request, the server knows to ignore + the request if it has already been completed. + The server guarantees that for at least 60 + minutes after the first request. For example, consider a situation where you make an initial request and the request times out. 
If diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deliverypipeline_notification_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deliverypipeline_notification_payload.py index 4a17dd727510..d0069fc983bf 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deliverypipeline_notification_payload.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/deliverypipeline_notification_payload.py @@ -39,6 +39,8 @@ class DeliveryPipelineNotificationEvent(proto.Message): message (str): Debug message for when a notification fails to send. + pipeline_uid (str): + Unique identifier of the ``DeliveryPipeline``. delivery_pipeline (str): The name of the ``Delivery Pipeline``. type_ (google.cloud.deploy_v1.types.Type): @@ -50,6 +52,10 @@ class DeliveryPipelineNotificationEvent(proto.Message): proto.STRING, number=1, ) + pipeline_uid: str = proto.Field( + proto.STRING, + number=4, + ) delivery_pipeline: str = proto.Field( proto.STRING, number=2, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/jobrun_notification_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/jobrun_notification_payload.py index 4c03dbb9dbbc..c8d74f2c54a1 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/jobrun_notification_payload.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/jobrun_notification_payload.py @@ -44,8 +44,12 @@ class JobRunNotificationEvent(proto.Message): Unique identifier of the ``DeliveryPipeline``. release_uid (str): Unique identifier of the ``Release``. + release (str): + The name of the ``Release``. rollout_uid (str): Unique identifier of the ``Rollout``. + rollout (str): + The name of the ``Rollout``. target_id (str): ID of the ``Target``. 
type_ (google.cloud.deploy_v1.types.Type): @@ -69,10 +73,18 @@ class JobRunNotificationEvent(proto.Message): proto.STRING, number=4, ) + release: str = proto.Field( + proto.STRING, + number=8, + ) rollout_uid: str = proto.Field( proto.STRING, number=5, ) + rollout: str = proto.Field( + proto.STRING, + number=9, + ) target_id: str = proto.Field( proto.STRING, number=6, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_notification_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_notification_payload.py index 19abbdca9c38..8164093feca5 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_notification_payload.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_notification_payload.py @@ -38,6 +38,10 @@ class ReleaseNotificationEvent(proto.Message): message (str): Debug message for when a notification fails to send. + pipeline_uid (str): + Unique identifier of the ``DeliveryPipeline``. + release_uid (str): + Unique identifier of the ``Release``. release (str): The name of the ``Release``. 
type_ (google.cloud.deploy_v1.types.Type): @@ -49,6 +53,14 @@ class ReleaseNotificationEvent(proto.Message): proto.STRING, number=1, ) + pipeline_uid: str = proto.Field( + proto.STRING, + number=4, + ) + release_uid: str = proto.Field( + proto.STRING, + number=5, + ) release: str = proto.Field( proto.STRING, number=2, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_render_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_render_payload.py index b90c3c30e9c4..4e45fbce31c1 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_render_payload.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/release_render_payload.py @@ -19,7 +19,7 @@ import proto # type: ignore -from google.cloud.deploy_v1.types import cloud_deploy +from google.cloud.deploy_v1.types import cloud_deploy, log_enums __protobuf__ = proto.module( package="google.cloud.deploy.v1", @@ -38,8 +38,15 @@ class ReleaseRenderEvent(proto.Message): Debug message for when a render transition occurs. Provides further details as rendering progresses through render states. + pipeline_uid (str): + Unique identifier of the ``DeliveryPipeline``. release (str): - The name of the release. + The name of the release. release_uid is not in this log + message because we write some of these log messages at + release creation time, before we've generated the uid. + type_ (google.cloud.deploy_v1.types.Type): + Type of this notification, e.g. for a release + render state change event. release_render_state (google.cloud.deploy_v1.types.Release.RenderState): The state of the release render. 
""" @@ -48,10 +55,19 @@ class ReleaseRenderEvent(proto.Message): proto.STRING, number=1, ) + pipeline_uid: str = proto.Field( + proto.STRING, + number=4, + ) release: str = proto.Field( proto.STRING, number=2, ) + type_: log_enums.Type = proto.Field( + proto.ENUM, + number=5, + enum=log_enums.Type, + ) release_render_state: cloud_deploy.Release.RenderState = proto.Field( proto.ENUM, number=3, diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_notification_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_notification_payload.py index d2783c800188..321995704f92 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_notification_payload.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_notification_payload.py @@ -42,13 +42,17 @@ class RolloutNotificationEvent(proto.Message): Unique identifier of the ``DeliveryPipeline``. release_uid (str): Unique identifier of the ``Release``. + release (str): + The name of the ``Release``. + rollout_uid (str): + Unique identifier of the ``Rollout``. rollout (str): The name of the ``Rollout``. + target_id (str): + ID of the ``Target`` that the rollout is deployed to. type_ (google.cloud.deploy_v1.types.Type): Type of this notification, e.g. for a Pub/Sub failure. - target_id (str): - ID of the ``Target`` that the rollout is deployed to. 
""" message: str = proto.Field( @@ -63,19 +67,27 @@ class RolloutNotificationEvent(proto.Message): proto.STRING, number=3, ) + release: str = proto.Field( + proto.STRING, + number=7, + ) + rollout_uid: str = proto.Field( + proto.STRING, + number=8, + ) rollout: str = proto.Field( proto.STRING, number=4, ) + target_id: str = proto.Field( + proto.STRING, + number=6, + ) type_: log_enums.Type = proto.Field( proto.ENUM, number=5, enum=log_enums.Type, ) - target_id: str = proto.Field( - proto.STRING, - number=6, - ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_update_payload.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_update_payload.py index 6470f5659e6d..25b082102f72 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_update_payload.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/rollout_update_payload.py @@ -34,22 +34,26 @@ class RolloutUpdateEvent(proto.Message): Platform Log event that describes the rollout update event. Attributes: + message (str): + Debug message for when a rollout update event + occurs. pipeline_uid (str): Unique identifier of the pipeline. release_uid (str): Unique identifier of the release. + release (str): + The name of the ``Release``. rollout (str): - The name of the rollout. + The name of the rollout. rollout_uid is not in this log + message because we write some of these log messages at + rollout creation time, before we've generated the uid. target_id (str): ID of the target. - rollout_update_type (google.cloud.deploy_v1.types.RolloutUpdateEvent.RolloutUpdateType): - The type of the rollout update. - message (str): - Debug message for when a rollout update event - occurs. type_ (google.cloud.deploy_v1.types.Type): Type of this notification, e.g. for a rollout update event. 
+ rollout_update_type (google.cloud.deploy_v1.types.RolloutUpdateEvent.RolloutUpdateType): + The type of the rollout update. """ class RolloutUpdateType(proto.Enum): @@ -100,6 +104,10 @@ class RolloutUpdateType(proto.Enum): ADVANCE_REQUIRED = 12 ADVANCED = 13 + message: str = proto.Field( + proto.STRING, + number=6, + ) pipeline_uid: str = proto.Field( proto.STRING, number=1, @@ -108,6 +116,10 @@ class RolloutUpdateType(proto.Enum): proto.STRING, number=2, ) + release: str = proto.Field( + proto.STRING, + number=8, + ) rollout: str = proto.Field( proto.STRING, number=3, @@ -116,20 +128,16 @@ class RolloutUpdateType(proto.Enum): proto.STRING, number=4, ) - rollout_update_type: RolloutUpdateType = proto.Field( - proto.ENUM, - number=5, - enum=RolloutUpdateType, - ) - message: str = proto.Field( - proto.STRING, - number=6, - ) type_: log_enums.Type = proto.Field( proto.ENUM, number=7, enum=log_enums.Type, ) + rollout_update_type: RolloutUpdateType = proto.Field( + proto.ENUM, + number=5, + enum=RolloutUpdateType, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index e4dc6bb526ff..81dd0f18ee16 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "1.15.0" + "version": "1.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index ae03f8f618d9..25734700c02b 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ 
b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -12775,6 +12775,7 @@ def test_create_delivery_pipeline_rest(request_type): "seconds": 751, "nanos": 543, }, + "stable_cutback_duration": {}, }, "service_networking": { "service": "service_value", @@ -13253,6 +13254,7 @@ def test_update_delivery_pipeline_rest(request_type): "seconds": 751, "nanos": 543, }, + "stable_cutback_duration": {}, }, "service_networking": { "service": "service_value", @@ -18379,6 +18381,7 @@ def test_create_release_rest(request_type): "seconds": 751, "nanos": 543, }, + "stable_cutback_duration": {}, }, "service_networking": { "service": "service_value", diff --git a/packages/google-cloud-dialogflow-cx/CHANGELOG.md b/packages/google-cloud-dialogflow-cx/CHANGELOG.md index 8814cae5c927..d8da4ba79d20 100644 --- a/packages/google-cloud-dialogflow-cx/CHANGELOG.md +++ b/packages/google-cloud-dialogflow-cx/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.30.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.30.0...google-cloud-dialogflow-cx-v1.30.1) (2024-01-12) + + +### Documentation + +* [google-cloud-dialogflow-cx] Fix formatting due to unclosed backtick ([#12175](https://github.com/googleapis/google-cloud-python/issues/12175)) ([a43fa12](https://github.com/googleapis/google-cloud-python/commit/a43fa123a21739e83e10fd0328a56039690d800c)) + ## [1.30.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.29.0...google-cloud-dialogflow-cx-v1.30.0) (2023-12-07) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index 653173b3ea24..b655d1ad1d26 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific 
language governing permissions and # limitations under the License. # -__version__ = "1.30.0" # {x-release-please-version} +__version__ = "1.30.1" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index 653173b3ea24..b655d1ad1d26 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.30.0" # {x-release-please-version} +__version__ = "1.30.1" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py index 4e1d4e7447d3..34b3f4b56fb2 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py @@ -1108,7 +1108,7 @@ class BoostSpecs(proto.Message): applied. The full names of the referenced data stores. Formats: ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`` - \`projects/{project}/locations/{location}/dataStores/{data_store} + ``projects/{project}/locations/{location}/dataStores/{data_store}`` spec (MutableSequence[google.cloud.dialogflowcx_v3.types.BoostSpec]): Optional. A list of boosting specifications. """ @@ -1133,7 +1133,7 @@ class FilterSpecs(proto.Message): applied. The full names of the referenced data stores. 
Formats: ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`` - \`projects/{project}/locations/{location}/dataStores/{data_store} + ``projects/{project}/locations/{location}/dataStores/{data_store}`` filter (str): Optional. The filter expression to be applied. Expression syntax is documented at diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py index 653173b3ea24..b655d1ad1d26 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.30.0" # {x-release-please-version} +__version__ = "1.30.1" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json index 84204f7f030b..0cfe97db1794 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.30.0" + "version": "1.30.1" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json index dbc4267ffc9c..3a4b816c1654 100644 --- 
a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.30.0" + "version": "1.30.1" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/CHANGELOG.md b/packages/google-cloud-dialogflow/CHANGELOG.md index 3a649dd668ea..4afb6fe23b64 100644 --- a/packages/google-cloud-dialogflow/CHANGELOG.md +++ b/packages/google-cloud-dialogflow/CHANGELOG.md @@ -4,6 +4,26 @@ [1]: https://pypi.org/project/dialogflow/#history +## [2.27.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.26.0...google-cloud-dialogflow-v2.27.0) (2024-01-04) + + +### Features + +* Add enable_conversation_augmented_query field to HumanAgentAssistantConfig.SuggestionFeatureConfig message ([c6d9113](https://github.com/googleapis/google-cloud-python/commit/c6d911353f33cbd946337c9cea5f30d2a34bfa59)) +* Add enable_conversation_augmented_query field to HumanAgentAssistantConfig.SuggestionFeatureConfig message ([a481c80](https://github.com/googleapis/google-cloud-python/commit/a481c80e541b9a21495fa507fc7ba895c5780869)) +* Add INTENT enum in SearchKnowledgeAnswer.AnswerType message ([c6d9113](https://github.com/googleapis/google-cloud-python/commit/c6d911353f33cbd946337c9cea5f30d2a34bfa59)) +* Add INTENT enum in SearchKnowledgeAnswer.AnswerType message ([a481c80](https://github.com/googleapis/google-cloud-python/commit/a481c80e541b9a21495fa507fc7ba895c5780869)) +* Add rewritten_query in field in SearchKnowledgeResponse message ([c6d9113](https://github.com/googleapis/google-cloud-python/commit/c6d911353f33cbd946337c9cea5f30d2a34bfa59)) +* Add rewritten_query in field in SearchKnowledgeResponse message 
([a481c80](https://github.com/googleapis/google-cloud-python/commit/a481c80e541b9a21495fa507fc7ba895c5780869)) +* Add sections field to HumanAgentAssistantConfig.SuggestionQueryConfig ([c6d9113](https://github.com/googleapis/google-cloud-python/commit/c6d911353f33cbd946337c9cea5f30d2a34bfa59)) +* Add sections field to HumanAgentAssistantConfig.SuggestionQueryConfig ([a481c80](https://github.com/googleapis/google-cloud-python/commit/a481c80e541b9a21495fa507fc7ba895c5780869)) + + +### Documentation + +* Improved comments on audio_config proto ([c6d9113](https://github.com/googleapis/google-cloud-python/commit/c6d911353f33cbd946337c9cea5f30d2a34bfa59)) +* Improved comments on audio_config proto ([a481c80](https://github.com/googleapis/google-cloud-python/commit/a481c80e541b9a21495fa507fc7ba895c5780869)) + ## [2.26.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.25.0...google-cloud-dialogflow-v2.26.0) (2023-12-07) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index 6a37b1b764dd..2d5ed6f35b7a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.26.0" # {x-release-please-version} +__version__ = "2.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index 6a37b1b764dd..2d5ed6f35b7a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.26.0" # {x-release-please-version} +__version__ = "2.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py index 511d833cd6dd..2153e56cc452 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/audio_config.py @@ -409,23 +409,9 @@ class InputAudioConfig(proto.Message): documentation `__ for more details. model (str): - Which Speech model to select for the given request. Select - the model best suited to your domain to get best results. If - a model is not explicitly specified, then we auto-select a - model based on the parameters in the InputAudioConfig. If - enhanced speech model is enabled for the agent and an - enhanced version of the specified model for the language - does not exist, then the speech is recognized using the - standard version of the specified model. Refer to `Cloud - Speech API - documentation `__ - for more details. 
If you specify a model, the following - models typically have the best performance: - - - phone_call (best for Agent Assist and telephony) - - latest_short (best for Dialogflow non-telephony) - - command_and_search (best for very short utterances and - commands) + Optional. Which Speech model to select for the given + request. For more information, see `Speech + models `__. model_variant (google.cloud.dialogflow_v2.types.SpeechModelVariant): Which variant of the [Speech model][google.cloud.dialogflow.v2.InputAudioConfig.model] to @@ -665,10 +651,24 @@ class SpeechToTextConfig(proto.Message): model (str): Which Speech model to select. Select the model best suited to your domain to get best results. If a model is not - explicitly specified, then a default model is used. Refer to - `Cloud Speech API + explicitly specified, then Dialogflow auto-selects a model + based on other parameters in the SpeechToTextConfig and + Agent settings. If enhanced speech model is enabled for the + agent and an enhanced version of the specified model for the + language does not exist, then the speech is recognized using + the standard version of the specified model. Refer to `Cloud + Speech API documentation `__ - for more details. + for more details. If you specify a model, the following + models typically have the best performance: + + - phone_call (best for Agent Assist and telephony) + - latest_short (best for Dialogflow non-telephony) + - command_and_search + + Leave this field unspecified to use `Agent Speech + settings `__ + for model selection. 
use_timeout_based_endpointing (bool): Use timeout based endpointing, interpreting endpointer sensitivy as seconds of timeout diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py index 246e781645ac..9a4b77faeae9 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py @@ -754,6 +754,8 @@ class SearchKnowledgeResponse(proto.Message): Most relevant snippets extracted from articles in the given knowledge base, ordered by confidence. + rewritten_query (str): + The rewritten query used to search knowledge. """ answers: MutableSequence["SearchKnowledgeAnswer"] = proto.RepeatedField( @@ -761,6 +763,10 @@ class SearchKnowledgeResponse(proto.Message): number=2, message="SearchKnowledgeAnswer", ) + rewritten_query: str = proto.Field( + proto.STRING, + number=3, + ) class SearchKnowledgeAnswer(proto.Message): @@ -786,13 +792,16 @@ class AnswerType(proto.Enum): ANSWER_TYPE_UNSPECIFIED (0): The answer has a unspecified type. FAQ (1): - The answer is from FAQ doucments. + The answer is from FAQ documents. GENERATIVE (2): The answer is from generative model. + INTENT (3): + The answer is from intent matching. """ ANSWER_TYPE_UNSPECIFIED = 0 FAQ = 1 GENERATIVE = 2 + INTENT = 3 class AnswerSource(proto.Message): r"""The sources of the answers. 
diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py index 0116df54aaa4..77752bcf1b71 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py @@ -354,12 +354,12 @@ class AutomatedAgentConfig(proto.Message): If environment is not specified, the default ``draft`` environment is used. session_ttl (google.protobuf.duration_pb2.Duration): - Optional. Sets Dialogflow CX session life - time. By default, a Dialogflow CX session - remains active and its data is stored for 30 - minutes after the last request is sent for the - session. This value should be no longer than 1 - day. + Optional. Configure lifetime of the + Dialogflow session. By default, a Dialogflow CX + session remains active and its data is stored + for 30 minutes after the last request is sent + for the session. This value should be no longer + than 1 day. """ agent: str = proto.Field( @@ -431,6 +431,9 @@ class SuggestionFeatureConfig(proto.Message): at answer records. Supported features: KNOWLEDGE_SEARCH. + enable_conversation_augmented_query (bool): + Optional. Enable including conversation context during query + answer generation. Supported features: KNOWLEDGE_SEARCH. suggestion_trigger_settings (google.cloud.dialogflow_v2.types.HumanAgentAssistantConfig.SuggestionTriggerSettings): Settings of suggestion trigger. 
@@ -457,6 +460,10 @@ class SuggestionFeatureConfig(proto.Message): proto.BOOL, number=14, ) + enable_conversation_augmented_query: bool = proto.Field( + proto.BOOL, + number=16, + ) suggestion_trigger_settings: "HumanAgentAssistantConfig.SuggestionTriggerSettings" = proto.Field( proto.MESSAGE, number=10, @@ -569,6 +576,10 @@ class SuggestionQueryConfig(proto.Message): Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped. + sections (google.cloud.dialogflow_v2.types.HumanAgentAssistantConfig.SuggestionQueryConfig.Sections): + Optional. The customized sections chosen to + return when requesting a summary of a + conversation. """ class KnowledgeBaseQuerySource(proto.Message): @@ -679,6 +690,72 @@ class ContextFilterSettings(proto.Message): number=3, ) + class Sections(proto.Message): + r"""Custom sections to return when requesting a summary of a + conversation. This is only supported when ``baseline_model_version`` + == '2.0'. + + Supported features: CONVERSATION_SUMMARIZATION, + CONVERSATION_SUMMARIZATION_VOICE. + + Attributes: + section_types (MutableSequence[google.cloud.dialogflow_v2.types.HumanAgentAssistantConfig.SuggestionQueryConfig.Sections.SectionType]): + The selected sections chosen to return when + requesting a summary of a conversation. A + duplicate selected section will be treated as a + single selected section. If section types are + not provided, the default will be {SITUATION, + ACTION, RESULT}. + """ + + class SectionType(proto.Enum): + r"""Selectable sections to return when requesting a summary of a + conversation. + + Values: + SECTION_TYPE_UNSPECIFIED (0): + Undefined section type, does not return + anything. + SITUATION (1): + What the customer needs help with or has + question about. Section name: "situation". + ACTION (2): + What the agent does to help the customer. + Section name: "action". + RESOLUTION (3): + Result of the customer service. 
A single word + describing the result of the conversation. + Section name: "resolution". + REASON_FOR_CANCELLATION (4): + Reason for cancellation if the customer requests for a + cancellation. "N/A" otherwise. Section name: + "reason_for_cancellation". + CUSTOMER_SATISFACTION (5): + "Unsatisfied" or "Satisfied" depending on the customer's + feelings at the end of the conversation. Section name: + "customer_satisfaction". + ENTITIES (6): + Key entities extracted from the conversation, + such as ticket number, order number, dollar + amount, etc. Section names are prefixed by + "entities/". + """ + SECTION_TYPE_UNSPECIFIED = 0 + SITUATION = 1 + ACTION = 2 + RESOLUTION = 3 + REASON_FOR_CANCELLATION = 4 + CUSTOMER_SATISFACTION = 5 + ENTITIES = 6 + + section_types: MutableSequence[ + "HumanAgentAssistantConfig.SuggestionQueryConfig.Sections.SectionType" + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="HumanAgentAssistantConfig.SuggestionQueryConfig.Sections.SectionType", + ) + knowledge_base_query_source: "HumanAgentAssistantConfig.SuggestionQueryConfig.KnowledgeBaseQuerySource" = proto.Field( proto.MESSAGE, number=1, @@ -710,6 +787,13 @@ class ContextFilterSettings(proto.Message): number=7, message="HumanAgentAssistantConfig.SuggestionQueryConfig.ContextFilterSettings", ) + sections: "HumanAgentAssistantConfig.SuggestionQueryConfig.Sections" = ( + proto.Field( + proto.MESSAGE, + number=8, + message="HumanAgentAssistantConfig.SuggestionQueryConfig.Sections", + ) + ) class ConversationModelConfig(proto.Message): r"""Custom conversation models used in agent assist feature. 
diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index 6a37b1b764dd..2d5ed6f35b7a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.26.0" # {x-release-please-version} +__version__ = "2.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/audio_config.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/audio_config.py index c614d994356b..fa77b5bdf451 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/audio_config.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/audio_config.py @@ -471,23 +471,9 @@ class InputAudioConfig(proto.Message): documentation `__ for more details. model (str): - Which Speech model to select for the given request. Select - the model best suited to your domain to get best results. If - a model is not explicitly specified, then we auto-select a - model based on the parameters in the InputAudioConfig. If - enhanced speech model is enabled for the agent and an - enhanced version of the specified model for the language - does not exist, then the speech is recognized using the - standard version of the specified model. Refer to `Cloud - Speech API - documentation `__ - for more details. If you specify a model, the following - models typically have the best performance: - - - phone_call (best for Agent Assist and telephony) - - latest_short (best for Dialogflow non-telephony) - - command_and_search (best for very short utterances and - commands) + Optional. 
Which Speech model to select for the given + request. For more information, see `Speech + models `__. model_variant (google.cloud.dialogflow_v2beta1.types.SpeechModelVariant): Which variant of the [Speech model][google.cloud.dialogflow.v2beta1.InputAudioConfig.model] @@ -739,10 +725,24 @@ class SpeechToTextConfig(proto.Message): model (str): Which Speech model to select. Select the model best suited to your domain to get best results. If a model is not - explicitly specified, then a default model is used. Refer to - `Cloud Speech API + explicitly specified, then Dialogflow auto-selects a model + based on other parameters in the SpeechToTextConfig and + Agent settings. If enhanced speech model is enabled for the + agent and an enhanced version of the specified model for the + language does not exist, then the speech is recognized using + the standard version of the specified model. Refer to `Cloud + Speech API documentation `__ - for more details. + for more details. If you specify a model, the following + models typically have the best performance: + + - phone_call (best for Agent Assist and telephony) + - latest_short (best for Dialogflow non-telephony) + - command_and_search + + Leave this field unspecified to use `Agent Speech + settings `__ + for model selection. use_timeout_based_endpointing (bool): Use timeout based endpointing, interpreting endpointer sensitivy as seconds of timeout diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py index 5981f4aada06..f50508c523c8 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py @@ -826,6 +826,8 @@ class SearchKnowledgeResponse(proto.Message): Most relevant snippets extracted from articles in the given knowledge base, ordered by confidence. 
+ rewritten_query (str): + The rewritten query used to search knowledge. """ answers: MutableSequence["SearchKnowledgeAnswer"] = proto.RepeatedField( @@ -833,6 +835,10 @@ class SearchKnowledgeResponse(proto.Message): number=2, message="SearchKnowledgeAnswer", ) + rewritten_query: str = proto.Field( + proto.STRING, + number=3, + ) class SearchKnowledgeAnswer(proto.Message): @@ -858,13 +864,16 @@ class AnswerType(proto.Enum): ANSWER_TYPE_UNSPECIFIED (0): The answer has a unspecified type. FAQ (1): - The answer is from FAQ doucments. + The answer is from FAQ documents. GENERATIVE (2): The answer is from generative model. + INTENT (3): + The answer is from intent matching. """ ANSWER_TYPE_UNSPECIFIED = 0 FAQ = 1 GENERATIVE = 2 + INTENT = 3 class AnswerSource(proto.Message): r"""The sources of the answers. diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py index 7bf0257f6778..012b3a4e8042 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py @@ -208,12 +208,12 @@ class AutomatedAgentConfig(proto.Message): If environment is not specified, the default ``draft`` environment is used. session_ttl (google.protobuf.duration_pb2.Duration): - Optional. Sets Dialogflow CX session life - time. By default, a Dialogflow CX session - remains active and its data is stored for 30 - minutes after the last request is sent for the - session. This value should be no longer than 1 - day. + Optional. Configure lifetime of the + Dialogflow session. By default, a Dialogflow CX + session remains active and its data is stored + for 30 minutes after the last request is sent + for the session. This value should be no longer + than 1 day. 
""" agent: str = proto.Field( @@ -286,6 +286,9 @@ class SuggestionFeatureConfig(proto.Message): at answer records. Supported features: KNOWLEDGE_SEARCH. + enable_conversation_augmented_query (bool): + Optional. Enable including conversation context during query + answer generation. Supported features: KNOWLEDGE_SEARCH. suggestion_trigger_settings (google.cloud.dialogflow_v2beta1.types.HumanAgentAssistantConfig.SuggestionTriggerSettings): Settings of suggestion trigger. @@ -312,6 +315,10 @@ class SuggestionFeatureConfig(proto.Message): proto.BOOL, number=14, ) + enable_conversation_augmented_query: bool = proto.Field( + proto.BOOL, + number=16, + ) suggestion_trigger_settings: "HumanAgentAssistantConfig.SuggestionTriggerSettings" = proto.Field( proto.MESSAGE, number=10, @@ -424,6 +431,10 @@ class SuggestionQueryConfig(proto.Message): Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped. + sections (google.cloud.dialogflow_v2beta1.types.HumanAgentAssistantConfig.SuggestionQueryConfig.Sections): + Optional. The customized sections chosen to + return when requesting a summary of a + conversation. """ class KnowledgeBaseQuerySource(proto.Message): @@ -534,6 +545,72 @@ class ContextFilterSettings(proto.Message): number=3, ) + class Sections(proto.Message): + r"""Custom sections to return when requesting a summary of a + conversation. This is only supported when ``baseline_model_version`` + == '2.0'. + + Supported features: CONVERSATION_SUMMARIZATION, + CONVERSATION_SUMMARIZATION_VOICE. + + Attributes: + section_types (MutableSequence[google.cloud.dialogflow_v2beta1.types.HumanAgentAssistantConfig.SuggestionQueryConfig.Sections.SectionType]): + The selected sections chosen to return when + requesting a summary of a conversation. A + duplicate selected section will be treated as a + single selected section. If section types are + not provided, the default will be {SITUATION, + ACTION, RESULT}. 
+ """ + + class SectionType(proto.Enum): + r"""Selectable sections to return when requesting a summary of a + conversation. + + Values: + SECTION_TYPE_UNSPECIFIED (0): + Undefined section type, does not return + anything. + SITUATION (1): + What the customer needs help with or has + question about. Section name: "situation". + ACTION (2): + What the agent does to help the customer. + Section name: "action". + RESOLUTION (3): + Result of the customer service. A single word + describing the result of the conversation. + Section name: "resolution". + REASON_FOR_CANCELLATION (4): + Reason for cancellation if the customer requests for a + cancellation. "N/A" otherwise. Section name: + "reason_for_cancellation". + CUSTOMER_SATISFACTION (5): + "Unsatisfied" or "Satisfied" depending on the customer's + feelings at the end of the conversation. Section name: + "customer_satisfaction". + ENTITIES (6): + Key entities extracted from the conversation, + such as ticket number, order number, dollar + amount, etc. Section names are prefixed by + "entities/". 
+ """ + SECTION_TYPE_UNSPECIFIED = 0 + SITUATION = 1 + ACTION = 2 + RESOLUTION = 3 + REASON_FOR_CANCELLATION = 4 + CUSTOMER_SATISFACTION = 5 + ENTITIES = 6 + + section_types: MutableSequence[ + "HumanAgentAssistantConfig.SuggestionQueryConfig.Sections.SectionType" + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="HumanAgentAssistantConfig.SuggestionQueryConfig.Sections.SectionType", + ) + knowledge_base_query_source: "HumanAgentAssistantConfig.SuggestionQueryConfig.KnowledgeBaseQuerySource" = proto.Field( proto.MESSAGE, number=1, @@ -565,6 +642,13 @@ class ContextFilterSettings(proto.Message): number=7, message="HumanAgentAssistantConfig.SuggestionQueryConfig.ContextFilterSettings", ) + sections: "HumanAgentAssistantConfig.SuggestionQueryConfig.Sections" = ( + proto.Field( + proto.MESSAGE, + number=8, + message="HumanAgentAssistantConfig.SuggestionQueryConfig.Sections", + ) + ) class ConversationModelConfig(proto.Message): r"""Custom conversation models used in agent assist feature. 
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index e6da51f4c809..00543f879382 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.26.0" + "version": "2.27.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index 98a12de42d61..0e505e3bfc1d 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.26.0" + "version": "2.27.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py index fff0d62ac7f3..a16a907d0282 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py @@ -3491,6 +3491,7 @@ def test_create_conversation_profile_rest(request_type): "suggestion_feature": {"type_": 1}, "enable_event_based_suggestion": True, "disable_agent_query_logging": True, + "enable_conversation_augmented_query": True, 
"suggestion_trigger_settings": { "no_smalltalk": True, "only_end_user": True, @@ -3516,6 +3517,7 @@ def test_create_conversation_profile_rest(request_type): "drop_virtual_agent_messages": True, "drop_ivr_messages": True, }, + "sections": {"section_types": [1]}, }, "conversation_model_config": { "model": "model_value", @@ -3953,6 +3955,7 @@ def test_update_conversation_profile_rest(request_type): "suggestion_feature": {"type_": 1}, "enable_event_based_suggestion": True, "disable_agent_query_logging": True, + "enable_conversation_augmented_query": True, "suggestion_trigger_settings": { "no_smalltalk": True, "only_end_user": True, @@ -3978,6 +3981,7 @@ def test_update_conversation_profile_rest(request_type): "drop_virtual_agent_messages": True, "drop_ivr_messages": True, }, + "sections": {"section_types": [1]}, }, "conversation_model_config": { "model": "model_value", diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py index 6a5ac1330f40..95b03eb46602 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py @@ -2819,7 +2819,9 @@ def test_search_knowledge(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_knowledge), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = conversation.SearchKnowledgeResponse() + call.return_value = conversation.SearchKnowledgeResponse( + rewritten_query="rewritten_query_value", + ) response = client.search_knowledge(request) # Establish that the underlying gRPC stub method was called. 
@@ -2829,6 +2831,7 @@ def test_search_knowledge(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, conversation.SearchKnowledgeResponse) + assert response.rewritten_query == "rewritten_query_value" def test_search_knowledge_empty_call(): @@ -2864,7 +2867,9 @@ async def test_search_knowledge_async( with mock.patch.object(type(client.transport.search_knowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - conversation.SearchKnowledgeResponse() + conversation.SearchKnowledgeResponse( + rewritten_query="rewritten_query_value", + ) ) response = await client.search_knowledge(request) @@ -2875,6 +2880,7 @@ async def test_search_knowledge_async( # Establish that the response is the type that we expect. assert isinstance(response, conversation.SearchKnowledgeResponse) + assert response.rewritten_query == "rewritten_query_value" @pytest.mark.asyncio @@ -5088,7 +5094,9 @@ def test_search_knowledge_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversation.SearchKnowledgeResponse() + return_value = conversation.SearchKnowledgeResponse( + rewritten_query="rewritten_query_value", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -5103,6 +5111,7 @@ def test_search_knowledge_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, conversation.SearchKnowledgeResponse) + assert response.rewritten_query == "rewritten_query_value" def test_search_knowledge_rest_required_fields( diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py index c6faaefe20a7..0620079e72b2 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py @@ -3497,6 +3497,7 @@ def test_create_conversation_profile_rest(request_type): "suggestion_feature": {"type_": 1}, "enable_event_based_suggestion": True, "disable_agent_query_logging": True, + "enable_conversation_augmented_query": True, "suggestion_trigger_settings": { "no_small_talk": True, "only_end_user": True, @@ -3522,6 +3523,7 @@ def test_create_conversation_profile_rest(request_type): "drop_virtual_agent_messages": True, "drop_ivr_messages": True, }, + "sections": {"section_types": [1]}, }, "conversation_model_config": { "model": "model_value", @@ -3960,6 +3962,7 @@ def test_update_conversation_profile_rest(request_type): "suggestion_feature": {"type_": 1}, "enable_event_based_suggestion": True, "disable_agent_query_logging": True, + "enable_conversation_augmented_query": True, "suggestion_trigger_settings": { "no_small_talk": True, "only_end_user": True, @@ -3985,6 +3988,7 @@ def test_update_conversation_profile_rest(request_type): "drop_virtual_agent_messages": True, "drop_ivr_messages": True, }, + "sections": {"section_types": [1]}, }, "conversation_model_config": { "model": "model_value", diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py index a7cf73aef164..137af1af088f 100644 --- 
a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py @@ -3064,7 +3064,9 @@ def test_search_knowledge(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_knowledge), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = conversation.SearchKnowledgeResponse() + call.return_value = conversation.SearchKnowledgeResponse( + rewritten_query="rewritten_query_value", + ) response = client.search_knowledge(request) # Establish that the underlying gRPC stub method was called. @@ -3074,6 +3076,7 @@ def test_search_knowledge(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, conversation.SearchKnowledgeResponse) + assert response.rewritten_query == "rewritten_query_value" def test_search_knowledge_empty_call(): @@ -3109,7 +3112,9 @@ async def test_search_knowledge_async( with mock.patch.object(type(client.transport.search_knowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - conversation.SearchKnowledgeResponse() + conversation.SearchKnowledgeResponse( + rewritten_query="rewritten_query_value", + ) ) response = await client.search_knowledge(request) @@ -3120,6 +3125,7 @@ async def test_search_knowledge_async( # Establish that the response is the type that we expect. assert isinstance(response, conversation.SearchKnowledgeResponse) + assert response.rewritten_query == "rewritten_query_value" @pytest.mark.asyncio @@ -5614,7 +5620,9 @@ def test_search_knowledge_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = conversation.SearchKnowledgeResponse() + return_value = conversation.SearchKnowledgeResponse( + rewritten_query="rewritten_query_value", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -5629,6 +5637,7 @@ def test_search_knowledge_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, conversation.SearchKnowledgeResponse) + assert response.rewritten_query == "rewritten_query_value" def test_search_knowledge_rest_required_fields( diff --git a/packages/google-cloud-discoveryengine/CHANGELOG.md b/packages/google-cloud-discoveryengine/CHANGELOG.md index 446357a0d980..1837ca2f68e9 100644 --- a/packages/google-cloud-discoveryengine/CHANGELOG.md +++ b/packages/google-cloud-discoveryengine/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.11.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.11.4...google-cloud-discoveryengine-v0.11.5) (2023-12-13) + + +### Features + +* **v1alpha:** add engine support for conversational engine service ([9b171b1](https://github.com/googleapis/google-cloud-python/commit/9b171b158ebdb4d10feb4c0faed6407a9023c3a8)) +* **v1alpha:** add search tuning service ([9b171b1](https://github.com/googleapis/google-cloud-python/commit/9b171b158ebdb4d10feb4c0faed6407a9023c3a8)) +* **v1alpha:** add site search engine service ([9b171b1](https://github.com/googleapis/google-cloud-python/commit/9b171b158ebdb4d10feb4c0faed6407a9023c3a8)) +* **v1alpha:** support search summarization with citations and references ([9b171b1](https://github.com/googleapis/google-cloud-python/commit/9b171b158ebdb4d10feb4c0faed6407a9023c3a8)) + + +### Documentation + +* **v1alpha:** keep the API doc up-to-date with recent changes 
([9b171b1](https://github.com/googleapis/google-cloud-python/commit/9b171b158ebdb4d10feb4c0faed6407a9023c3a8)) + ## [0.11.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.11.3...google-cloud-discoveryengine-v0.11.4) (2023-12-07) diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/search_tuning_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/search_tuning_service.rst new file mode 100644 index 000000000000..5eaf9d091505 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/search_tuning_service.rst @@ -0,0 +1,6 @@ +SearchTuningService +------------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1alpha.services.search_tuning_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst index 0177e984c5cc..62a321adfcba 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/services_.rst @@ -11,5 +11,6 @@ Services for Google Cloud Discoveryengine v1alpha API recommendation_service schema_service search_service + search_tuning_service site_search_engine_service user_event_service diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/site_search_engine_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/site_search_engine_service.rst index 438309914a4a..860259370b29 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/site_search_engine_service.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1alpha/site_search_engine_service.rst @@ -4,3 +4,7 @@ SiteSearchEngineService .. 
automodule:: google.cloud.discoveryengine_v1alpha.services.site_search_engine_service :members: :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index 977b515991ee..768cd2103fa5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.4" # {x-release-please-version} +__version__ = "0.11.5" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py index 977b515991ee..768cd2103fa5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.4" # {x-release-please-version} +__version__ = "0.11.5" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py index ce8cddfd715b..3a4a3df59544 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py @@ -38,6 +38,10 @@ ) from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, +) from .services.site_search_engine_service import ( SiteSearchEngineServiceAsyncClient, SiteSearchEngineServiceClient, @@ -143,10 +147,40 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, SearchResponse +from .types.search_tuning_service import ( + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) +from .types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .types.site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + 
ListTargetSitesRequest, + ListTargetSitesResponse, RecrawlUrisMetadata, RecrawlUrisRequest, RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, ) from .types.user_event import ( CompletionInfo, @@ -169,8 +203,15 @@ "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", + "SearchTuningServiceAsyncClient", "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", "BigQuerySource", "CollectUserEventRequest", "CompleteQueryRequest", @@ -191,6 +232,8 @@ "CreateEngineRequest", "CreateSchemaMetadata", "CreateSchemaRequest", + "CreateTargetSiteMetadata", + "CreateTargetSiteRequest", "CustomAttribute", "DataStore", "DataStoreServiceClient", @@ -202,12 +245,22 @@ "DeleteEngineRequest", "DeleteSchemaMetadata", "DeleteSchemaRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", "Document", "DocumentInfo", "DocumentServiceClient", "DoubleList", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", "Engine", "EngineServiceClient", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", "FieldConfig", "GcsSource", "GetConversationRequest", @@ -215,6 +268,8 @@ "GetDocumentRequest", "GetEngineRequest", "GetSchemaRequest", + "GetSiteSearchEngineRequest", + "GetTargetSiteRequest", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", @@ -234,6 +289,8 @@ "ListEnginesResponse", "ListSchemasRequest", "ListSchemasResponse", + "ListTargetSitesRequest", + "ListTargetSitesResponse", "MediaInfo", "PageInfo", "PanelInfo", @@ -260,9 +317,16 @@ "SearchResponse", 
"SearchServiceClient", "SearchTier", + "SearchTuningServiceClient", + "SiteSearchEngine", "SiteSearchEngineServiceClient", + "SiteVerificationInfo", "SolutionType", + "TargetSite", "TextInput", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", "TransactionInfo", "TuneEngineMetadata", "TuneEngineRequest", @@ -273,6 +337,8 @@ "UpdateEngineRequest", "UpdateSchemaMetadata", "UpdateSchemaRequest", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "UserEvent", "UserEventServiceClient", "UserInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json index 6d78899820ae..754a0c2fd153 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json @@ -667,35 +667,234 @@ } } }, + "SearchTuningService": { + "clients": { + "grpc": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SearchTuningServiceAsyncClient", + "rpcs": { + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "rest": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + } + } + }, "SiteSearchEngineService": { "clients": { "grpc": { "libraryClient": "SiteSearchEngineServiceClient", "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + 
"methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, "RecrawlUris": { "methods": [ "recrawl_uris" ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] } } }, "grpc-async": { "libraryClient": "SiteSearchEngineServiceAsyncClient", "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, "RecrawlUris": { "methods": [ "recrawl_uris" ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] } } }, "rest": { "libraryClient": "SiteSearchEngineServiceClient", "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + 
"methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, "RecrawlUris": { "methods": [ "recrawl_uris" ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] } } } diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py index 977b515991ee..768cd2103fa5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.4" # {x-release-please-version} +__version__ = "0.11.5" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py index a6f5d4dfd9ef..ee50f24b9c33 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py @@ -383,6 +383,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -497,6 +501,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py index 188f7bfc7d0b..12b9218eb744 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py @@ -460,6 +460,11 @@ def __call__( "uri": 
"/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}:converse", "body": "*", }, + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}:converse", + "body": "*", + }, ] request, metadata = self._interceptor.pre_converse_conversation( request, metadata @@ -569,6 +574,11 @@ def __call__( "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", "body": "conversation", }, + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + "body": "conversation", + }, ] request, metadata = self._interceptor.pre_create_conversation( request, metadata @@ -668,6 +678,10 @@ def __call__( "method": "delete", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", }, + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, ] request, metadata = self._interceptor.pre_delete_conversation( request, metadata @@ -756,6 +770,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, ] request, metadata = self._interceptor.pre_get_conversation( request, metadata @@ -852,6 +870,10 @@ def __call__( "method": "get", "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + }, ] request, metadata = self._interceptor.pre_list_conversations( request, metadata @@ -951,6 +973,11 @@ def __call__( "uri": "/v1alpha/{conversation.name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", "body": "conversation", }, + { + "method": "patch", + "uri": 
"/v1alpha/{conversation.name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + "body": "conversation", + }, ] request, metadata = self._interceptor.pre_update_conversation( request, metadata @@ -1100,6 +1127,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1214,6 +1245,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py index 459926110c2f..24412299c59b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py @@ -392,6 +392,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -446,6 +450,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], 
"google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1094,6 +1102,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1208,6 +1220,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py index 4e535d28e5ba..f51fe6cb0390 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py @@ -440,6 +440,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -494,6 +498,10 @@ def operations_client(self) -> 
operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1340,6 +1348,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1454,6 +1466,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py index 5bda5ec55655..ba20e90e2136 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/async_client.py @@ -988,7 +988,8 @@ async def resume_engine( metadata: Sequence[Tuple[str, str]] = (), ) -> engine.Engine: r"""Resumes the training of an existing engine. Only applicable if - [solution_type][] is + [SolutionType][google.cloud.discoveryengine.v1alpha.SolutionType] + is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. .. 
code-block:: python diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py index 1f1bf4f377eb..32abc22677cd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py @@ -1239,7 +1239,8 @@ def resume_engine( metadata: Sequence[Tuple[str, str]] = (), ) -> engine.Engine: r"""Resumes the training of an existing engine. Only applicable if - [solution_type][] is + [SolutionType][google.cloud.discoveryengine.v1alpha.SolutionType] + is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. .. code-block:: python diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py index a8097ee159d1..1cc197a163e1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc.py @@ -415,7 +415,8 @@ def resume_engine( r"""Return a callable for the resume engine method over gRPC. Resumes the training of an existing engine. Only applicable if - [solution_type][] is + [SolutionType][google.cloud.discoveryengine.v1alpha.SolutionType] + is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. 
Returns: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py index e0d7366b4e01..688eb2b70e3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/grpc_asyncio.py @@ -427,7 +427,8 @@ def resume_engine( r"""Return a callable for the resume engine method over gRPC. Resumes the training of an existing engine. Only applicable if - [solution_type][] is + [SolutionType][google.cloud.discoveryengine.v1alpha.SolutionType] + is [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1alpha.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. Returns: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py index 99d2c9785d5b..ca59d9b35e45 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py @@ -476,6 +476,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -530,6 +534,10 @@ def 
operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1462,6 +1470,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1576,6 +1588,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py index d27e3f402777..6001820c4b9e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py @@ -393,6 +393,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -507,6 +511,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": 
"/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py index bd56c651711b..790ca4cfcec3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py @@ -387,6 +387,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -441,6 +445,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1071,6 +1079,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1185,6 +1197,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + 
"method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py index fc12a160cb67..e94cba3b952e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py @@ -391,6 +391,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -505,6 +509,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/__init__.py new file mode 100644 index 000000000000..7d2067c40f30 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SearchTuningServiceAsyncClient +from .client import SearchTuningServiceClient + +__all__ = ( + "SearchTuningServiceClient", + "SearchTuningServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py new file mode 100644 index 000000000000..729d05c5ace1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/async_client.py @@ -0,0 +1,445 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import search_tuning_service + +from .client import SearchTuningServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport + + +class SearchTuningServiceAsyncClient: + """Service for search tuning.""" + + _client: SearchTuningServiceClient + + DEFAULT_ENDPOINT = SearchTuningServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + + data_store_path = staticmethod(SearchTuningServiceClient.data_store_path) + parse_data_store_path = staticmethod( + SearchTuningServiceClient.parse_data_store_path + ) + common_billing_account_path = staticmethod( + SearchTuningServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SearchTuningServiceClient.parse_common_billing_account_path + ) + 
common_folder_path = staticmethod(SearchTuningServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SearchTuningServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SearchTuningServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SearchTuningServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SearchTuningServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SearchTuningServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SearchTuningServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SearchTuningServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_info.__func__(SearchTuningServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. 
+ """ + return SearchTuningServiceClient.from_service_account_file.__func__(SearchTuningServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SearchTuningServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(SearchTuningServiceClient).get_transport_class, + type(SearchTuningServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SearchTuningServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SearchTuningServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SearchTuningServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Trains a custom model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.TrainCustomModelRequest, dict]]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1alpha.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.train_custom_model, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SearchTuningServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py new file mode 100644 index 000000000000..5a13c41968e3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py @@ -0,0 +1,679 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import search_tuning_service + +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc import SearchTuningServiceGrpcTransport +from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport +from .transports.rest import SearchTuningServiceRestTransport + + +class SearchTuningServiceClientMeta(type): + """Metaclass for the SearchTuningService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SearchTuningServiceTransport]] + _transport_registry["grpc"] = SearchTuningServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SearchTuningServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SearchTuningServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SearchTuningServiceClient(metaclass=SearchTuningServiceClientMeta): + """Service for search tuning.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> 
str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SearchTuningServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SearchTuningServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SearchTuningServiceTransport): + # transport is a SearchTuningServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Trains a custom model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.TrainCustomModelRequest, dict]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1alpha.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a search_tuning_service.TrainCustomModelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.train_custom_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SearchTuningServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/__init__.py new file mode 100644 index 000000000000..fb4142e152c7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport +from .grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport +from .rest import SearchTuningServiceRestInterceptor, SearchTuningServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SearchTuningServiceTransport]] +_transport_registry["grpc"] = SearchTuningServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SearchTuningServiceRestTransport + +__all__ = ( + "SearchTuningServiceTransport", + "SearchTuningServiceGrpcTransport", + "SearchTuningServiceGrpcAsyncIOTransport", + "SearchTuningServiceRestTransport", + "SearchTuningServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py new file mode 100644 index 000000000000..cb450063eaff --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/base.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1alpha import gapic_version as package_version +from google.cloud.discoveryengine_v1alpha.types import search_tuning_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SearchTuningServiceTransport(abc.ABC): + """Abstract transport class for SearchTuningService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.train_custom_model: gapic_v1.method.wrap_method( + self.train_custom_model, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SearchTuningServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py new file mode 100644 index 000000000000..c3d05805ca1f --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport + + +class SearchTuningServiceGrpcTransport(SearchTuningServiceTransport): + """gRPC backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the train custom model method over gRPC. + + Trains a custom model. + + Returns: + Callable[[~.TrainCustomModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "train_custom_model" not in self._stubs: + self._stubs["train_custom_model"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SearchTuningService/TrainCustomModel", + request_serializer=search_tuning_service.TrainCustomModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["train_custom_model"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SearchTuningServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3e70bb8096cd --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/grpc_asyncio.py @@ -0,0 +1,322 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport + + +class SearchTuningServiceGrpcAsyncIOTransport(SearchTuningServiceTransport): + """gRPC AsyncIO backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the train custom model method over gRPC. + + Trains a custom model. + + Returns: + Callable[[~.TrainCustomModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "train_custom_model" not in self._stubs: + self._stubs["train_custom_model"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SearchTuningService/TrainCustomModel", + request_serializer=search_tuning_service.TrainCustomModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["train_custom_model"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SearchTuningServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py new file mode 100644 index 000000000000..825a86eccd49 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py @@ -0,0 +1,756 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SearchTuningServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SearchTuningServiceRestInterceptor: + """Interceptor for SearchTuningService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SearchTuningServiceRestTransport. + + .. code-block:: python + class MyCustomSearchTuningServiceInterceptor(SearchTuningServiceRestInterceptor): + def pre_train_custom_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_train_custom_model(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SearchTuningServiceRestTransport(interceptor=MyCustomSearchTuningServiceInterceptor()) + client = SearchTuningServiceClient(transport=transport) + + + """ + + def pre_train_custom_model( + self, + request: search_tuning_service.TrainCustomModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.TrainCustomModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_train_custom_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SearchTuningServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SearchTuningServiceRestInterceptor + + +class SearchTuningServiceRestTransport(SearchTuningServiceTransport): + """REST backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SearchTuningServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SearchTuningServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": 
"/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _TrainCustomModel(SearchTuningServiceRestStub): + def __hash__(self): + return hash("TrainCustomModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.TrainCustomModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the train custom model method over HTTP. + + Args: + request (~.search_tuning_service.TrainCustomModelRequest): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{data_store=projects/*/locations/*/collections/*/dataStores/*}:trainCustomModel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_train_custom_model( + request, metadata + ) + pb_request = search_tuning_service.TrainCustomModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_train_custom_model(resp) + return resp + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TrainCustomModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the 
query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params 
= json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SearchTuningServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py index 72618e1a9df2..9523334697c7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/async_client.py @@ -46,8 +46,16 @@ from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.services.site_search_engine_service import ( + pagers, +) +from 
google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) from .client import SiteSearchEngineServiceClient from .transports.base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport @@ -68,6 +76,10 @@ class SiteSearchEngineServiceAsyncClient: parse_site_search_engine_path = staticmethod( SiteSearchEngineServiceClient.parse_site_search_engine_path ) + target_site_path = staticmethod(SiteSearchEngineServiceClient.target_site_path) + parse_target_site_path = staticmethod( + SiteSearchEngineServiceClient.parse_target_site_path + ) common_billing_account_path = staticmethod( SiteSearchEngineServiceClient.common_billing_account_path ) @@ -227,17 +239,134 @@ def __init__( client_info=client_info, ) - async def recrawl_uris( + async def get_site_search_engine( self, request: Optional[ - Union[site_search_engine_service.RecrawlUrisRequest, dict] + Union[site_search_engine_service.GetSiteSearchEngineRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site_search_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.GetSiteSearchEngineRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetSiteSearchEngine] + method. + name (:class:`str`): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it + exists, a PERMISSION_DENIED error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.GetSiteSearchEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_site_search_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_target_site( + self, + request: Optional[ + Union[site_search_engine_service.CreateTargetSiteRequest, dict] ] = None, *, + parent: Optional[str] = None, + target_site: Optional[site_search_engine.TargetSite] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Request on-demand recrawl for a list of URIs. + r"""Creates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. .. 
code-block:: python @@ -250,18 +379,21 @@ async def recrawl_uris( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import discoveryengine_v1alpha - async def sample_recrawl_uris(): + async def sample_create_target_site(): # Create a client client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() # Initialize request argument(s) - request = discoveryengine_v1alpha.RecrawlUrisRequest( - site_search_engine="site_search_engine_value", - uris=['uris_value1', 'uris_value2'], + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, ) # Make the request - operation = client.recrawl_uris(request=request) + operation = client.create_target_site(request=request) print("Waiting for operation to complete...") @@ -271,10 +403,27 @@ async def sample_recrawl_uris(): print(response) Args: - request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.RecrawlUrisRequest, dict]]): + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.CreateTargetSiteRequest, dict]]): The request object. Request message for - [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite] method. + parent (:class:`str`): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_site (:class:`google.cloud.discoveryengine_v1alpha.types.TargetSite`): + Required. 
The + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + to create. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -285,18 +434,138 @@ async def sample_recrawl_uris(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.RecrawlUrisResponse` Response message for - [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1alpha.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.CreateTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.CreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def batch_create_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchCreateTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] in + a batch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + requests = discoveryengine_v1alpha.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.BatchCreateTargetSitesRequest, dict]]): + The request object. Request message for [SiteSearchEngineService.s][] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.BatchCreateTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchCreateTargetSites] method. """ # Create or coerce a protobuf request object. 
- request = site_search_engine_service.RecrawlUrisRequest(request) + request = site_search_engine_service.BatchCreateTargetSitesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.recrawl_uris, + self._client._transport.batch_create_target_sites, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -304,9 +573,7 @@ async def sample_recrawl_uris(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("site_search_engine", request.site_search_engine),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -321,8 +588,1030 @@ async def sample_recrawl_uris(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - site_search_engine_service.RecrawlUrisResponse, - metadata_type=site_search_engine_service.RecrawlUrisMetadata, + site_search_engine_service.BatchCreateTargetSitesResponse, + metadata_type=site_search_engine_service.BatchCreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def get_target_site( + self, + request: Optional[ + Union[site_search_engine_service.GetTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Gets a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_get_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_target_site(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.GetTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetTargetSite] + method. + name (:class:`str`): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1alpha.types.TargetSite: + A target site for the + SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.GetTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_target_site( + self, + request: Optional[ + Union[site_search_engine_service.UpdateTargetSiteRequest, dict] + ] = None, + *, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_update_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.UpdateTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite] + method. + target_site (:class:`google.cloud.discoveryengine_v1alpha.types.TargetSite`): + Required. The target site to update. If the caller does + not have permission to update the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1alpha.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.UpdateTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("target_site.name", request.target_site.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.UpdateTargetSiteMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_target_site( + self, + request: Optional[ + Union[site_search_engine_service.DeleteTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.DeleteTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite] + method. + name (:class:`str`): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. 
+ + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.DeleteTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=site_search_engine_service.DeleteTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def list_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.ListTargetSitesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTargetSitesAsyncPager: + r"""Gets a list of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.ListTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + parent (:class:`str`): + Required. The parent site search engine resource name, + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s + under this site search engine, regardless of whether or + not this branch exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.ListTargetSitesAsyncPager: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.ListTargetSitesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTargetSitesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def enable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.EnableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrade from basic site search to advanced site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.EnableAdvancedSiteSearchRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.EnableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.enable_advanced_site_search, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.EnableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.EnableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. 
+ return response + + async def disable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.DisableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Downgrade from advanced site search to basic site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.DisableAdvancedSiteSearchRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.DisableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.disable_advanced_site_search, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.DisableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.DisableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + async def recrawl_uris( + self, + request: Optional[ + Union[site_search_engine_service.RecrawlUrisRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Request on-demand recrawl for a list of URIs. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.RecrawlUrisRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.RecrawlUrisResponse` Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + method. + + """ + # Create or coerce a protobuf request object. 
+ request = site_search_engine_service.RecrawlUrisRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.recrawl_uris, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.RecrawlUrisResponse, + metadata_type=site_search_engine_service.RecrawlUrisMetadata, + ) + + # Done; return the response. + return response + + async def batch_verify_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchVerifyTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.BatchVerifyTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.BatchVerifyTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.BatchVerifyTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_verify_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.BatchVerifyTargetSitesResponse, + metadata_type=site_search_engine_service.BatchVerifyTargetSitesMetadata, + ) + + # Done; return the response. + return response + + async def fetch_domain_verification_status( + self, + request: Optional[ + Union[site_search_engine_service.FetchDomainVerificationStatusRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchDomainVerificationStatusAsyncPager: + r"""Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.FetchDomainVerificationStatusAsyncPager: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_domain_verification_status, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchDomainVerificationStatusAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py index e068cecfbdb8..b376115e0020 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py @@ -50,8 +50,16 @@ from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.services.site_search_engine_service import ( + pagers, +) +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) from .transports.base import 
DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport from .transports.grpc import SiteSearchEngineServiceGrpcTransport @@ -203,6 +211,30 @@ def parse_site_search_engine_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def target_site_path( + project: str, + location: str, + data_store: str, + target_site: str, + ) -> str: + """Returns a fully-qualified target_site string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}".format( + project=project, + location=location, + data_store=data_store, + target_site=target_site, + ) + + @staticmethod + def parse_target_site_path(path: str) -> Dict[str, str]: + """Parses a target_site path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/siteSearchEngine/targetSites/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -447,17 +479,136 @@ def __init__( api_audience=client_options.api_audience, ) - def recrawl_uris( + def get_site_search_engine( self, request: Optional[ - Union[site_search_engine_service.RecrawlUrisRequest, dict] + Union[site_search_engine_service.GetSiteSearchEngineRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_site_search_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.GetSiteSearchEngineRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetSiteSearchEngine] + method. + name (str): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it + exists, a PERMISSION_DENIED error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.GetSiteSearchEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.GetSiteSearchEngineRequest + ): + request = site_search_engine_service.GetSiteSearchEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_site_search_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_target_site( + self, + request: Optional[ + Union[site_search_engine_service.CreateTargetSiteRequest, dict] ] = None, *, + parent: Optional[str] = None, + target_site: Optional[site_search_engine.TargetSite] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Request on-demand recrawl for a list of URIs. + r"""Creates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. .. 
code-block:: python @@ -470,18 +621,21 @@ def recrawl_uris( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import discoveryengine_v1alpha - def sample_recrawl_uris(): + def sample_create_target_site(): # Create a client client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() # Initialize request argument(s) - request = discoveryengine_v1alpha.RecrawlUrisRequest( - site_search_engine="site_search_engine_value", - uris=['uris_value1', 'uris_value2'], + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, ) # Make the request - operation = client.recrawl_uris(request=request) + operation = client.create_target_site(request=request) print("Waiting for operation to complete...") @@ -491,10 +645,27 @@ def sample_recrawl_uris(): print(response) Args: - request (Union[google.cloud.discoveryengine_v1alpha.types.RecrawlUrisRequest, dict]): + request (Union[google.cloud.discoveryengine_v1alpha.types.CreateTargetSiteRequest, dict]): The request object. Request message for - [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite] method. + parent (str): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_site (google.cloud.discoveryengine_v1alpha.types.TargetSite): + Required. The + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + to create. 
+ + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -505,29 +676,151 @@ def sample_recrawl_uris(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.RecrawlUrisResponse` Response message for - [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1alpha.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.CreateTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.CreateTargetSiteRequest): + request = site_search_engine_service.CreateTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.CreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def batch_create_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchCreateTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] in + a batch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + requests = discoveryengine_v1alpha.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.BatchCreateTargetSitesRequest, dict]): + The request object. Request message for [SiteSearchEngineService.s][] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.BatchCreateTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchCreateTargetSites] method. """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes - # in a site_search_engine_service.RecrawlUrisRequest. + # in a site_search_engine_service.BatchCreateTargetSitesRequest. 
# There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, site_search_engine_service.RecrawlUrisRequest): - request = site_search_engine_service.RecrawlUrisRequest(request) + if not isinstance( + request, site_search_engine_service.BatchCreateTargetSitesRequest + ): + request = site_search_engine_service.BatchCreateTargetSitesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.recrawl_uris] + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_target_sites + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("site_search_engine", request.site_search_engine),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -542,8 +835,1055 @@ def sample_recrawl_uris(): response = operation.from_gapic( response, self._transport.operations_client, - site_search_engine_service.RecrawlUrisResponse, - metadata_type=site_search_engine_service.RecrawlUrisMetadata, + site_search_engine_service.BatchCreateTargetSitesResponse, + metadata_type=site_search_engine_service.BatchCreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def get_target_site( + self, + request: Optional[ + Union[site_search_engine_service.GetTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Gets a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_get_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_target_site(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.GetTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetTargetSite] + method. + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1alpha.types.TargetSite: + A target site for the + SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.GetTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.GetTargetSiteRequest): + request = site_search_engine_service.GetTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_target_site( + self, + request: Optional[ + Union[site_search_engine_service.UpdateTargetSiteRequest, dict] + ] = None, + *, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_update_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.UpdateTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite] + method. + target_site (google.cloud.discoveryengine_v1alpha.types.TargetSite): + Required. The target site to update. 
If the caller does + not have permission to update the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1alpha.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.UpdateTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.UpdateTargetSiteRequest): + request = site_search_engine_service.UpdateTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("target_site.name", request.target_site.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.UpdateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def delete_target_site( + self, + request: Optional[ + Union[site_search_engine_service.DeleteTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.DeleteTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite] + method. + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.DeleteTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.DeleteTargetSiteRequest): + request = site_search_engine_service.DeleteTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=site_search_engine_service.DeleteTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def list_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.ListTargetSitesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTargetSitesPager: + r"""Gets a list of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.ListTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + parent (str): + Required. 
The parent site search engine resource name, + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s + under this site search engine, regardless of whether or + not this branch exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.ListTargetSitesPager: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.ListTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, site_search_engine_service.ListTargetSitesRequest): + request = site_search_engine_service.ListTargetSitesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_target_sites] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTargetSitesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.EnableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrade from basic site search to advanced site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.EnableAdvancedSiteSearchRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.EnableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.EnableAdvancedSiteSearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, site_search_engine_service.EnableAdvancedSiteSearchRequest + ): + request = site_search_engine_service.EnableAdvancedSiteSearchRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.enable_advanced_site_search + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.EnableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.EnableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + def disable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.DisableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Downgrade from advanced site search to basic site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.DisableAdvancedSiteSearchRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.DisableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.DisableAdvancedSiteSearchRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.DisableAdvancedSiteSearchRequest + ): + request = site_search_engine_service.DisableAdvancedSiteSearchRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.disable_advanced_site_search + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.DisableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.DisableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + def recrawl_uris( + self, + request: Optional[ + Union[site_search_engine_service.RecrawlUrisRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Request on-demand recrawl for a list of URIs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.RecrawlUrisRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.RecrawlUrisResponse` Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.RecrawlUrisRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, site_search_engine_service.RecrawlUrisRequest): + request = site_search_engine_service.RecrawlUrisRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recrawl_uris] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.RecrawlUrisResponse, + metadata_type=site_search_engine_service.RecrawlUrisMetadata, + ) + + # Done; return the response. + return response + + def batch_verify_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchVerifyTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.BatchVerifyTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1alpha.types.BatchVerifyTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.BatchVerifyTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, site_search_engine_service.BatchVerifyTargetSitesRequest + ): + request = site_search_engine_service.BatchVerifyTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_verify_target_sites + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.BatchVerifyTargetSitesResponse, + metadata_type=site_search_engine_service.BatchVerifyTargetSitesMetadata, + ) + + # Done; return the response. + return response + + def fetch_domain_verification_status( + self, + request: Optional[ + Union[site_search_engine_service.FetchDomainVerificationStatusRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchDomainVerificationStatusPager: + r"""Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.FetchDomainVerificationStatusPager: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.FetchDomainVerificationStatusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, site_search_engine_service.FetchDomainVerificationStatusRequest + ): + request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_domain_verification_status + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchDomainVerificationStatusPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/pagers.py new file mode 100644 index 000000000000..f602ec55a8ea --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/pagers.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) + + +class ListTargetSitesPager: + """A pager for iterating through ``list_target_sites`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1alpha.types.ListTargetSitesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTargetSites`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1alpha.types.ListTargetSitesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., site_search_engine_service.ListTargetSitesResponse], + request: site_search_engine_service.ListTargetSitesRequest, + response: site_search_engine_service.ListTargetSitesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1alpha.types.ListTargetSitesRequest): + The initial request object. + response (google.cloud.discoveryengine_v1alpha.types.ListTargetSitesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = site_search_engine_service.ListTargetSitesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[site_search_engine_service.ListTargetSitesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[site_search_engine.TargetSite]: + for page in self.pages: + yield from page.target_sites + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTargetSitesAsyncPager: + """A pager for iterating through ``list_target_sites`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1alpha.types.ListTargetSitesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTargetSites`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1alpha.types.ListTargetSitesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[site_search_engine_service.ListTargetSitesResponse] + ], + request: site_search_engine_service.ListTargetSitesRequest, + response: site_search_engine_service.ListTargetSitesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.discoveryengine_v1alpha.types.ListTargetSitesRequest): + The initial request object. + response (google.cloud.discoveryengine_v1alpha.types.ListTargetSitesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = site_search_engine_service.ListTargetSitesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[site_search_engine_service.ListTargetSitesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[site_search_engine.TargetSite]: + async def async_generator(): + async for page in self.pages: + for response in page.target_sites: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchDomainVerificationStatusPager: + """A pager for iterating through ``fetch_domain_verification_status`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusResponse` object, and + provides an ``__iter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchDomainVerificationStatus`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., site_search_engine_service.FetchDomainVerificationStatusResponse + ], + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusRequest): + The initial request object. + response (google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[site_search_engine_service.FetchDomainVerificationStatusResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[site_search_engine.TargetSite]: + for page in self.pages: + yield from page.target_sites + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchDomainVerificationStatusAsyncPager: + """A pager for iterating through ``fetch_domain_verification_status`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchDomainVerificationStatus`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ], + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusRequest): + The initial request object. + response (google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + site_search_engine_service.FetchDomainVerificationStatusResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[site_search_engine.TargetSite]: + async def async_generator(): + async for page in self.pages: + for response in page.target_sites: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py index 1ae00e4102c9..595943980b84 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/base.py @@ -27,7 +27,10 @@ from google.oauth2 import service_account # type: ignore from google.cloud.discoveryengine_v1alpha import gapic_version as package_version -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -124,11 
+127,66 @@ def __init__( def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { + self.get_site_search_engine: gapic_v1.method.wrap_method( + self.get_site_search_engine, + default_timeout=None, + client_info=client_info, + ), + self.create_target_site: gapic_v1.method.wrap_method( + self.create_target_site, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_target_sites: gapic_v1.method.wrap_method( + self.batch_create_target_sites, + default_timeout=None, + client_info=client_info, + ), + self.get_target_site: gapic_v1.method.wrap_method( + self.get_target_site, + default_timeout=None, + client_info=client_info, + ), + self.update_target_site: gapic_v1.method.wrap_method( + self.update_target_site, + default_timeout=None, + client_info=client_info, + ), + self.delete_target_site: gapic_v1.method.wrap_method( + self.delete_target_site, + default_timeout=None, + client_info=client_info, + ), + self.list_target_sites: gapic_v1.method.wrap_method( + self.list_target_sites, + default_timeout=None, + client_info=client_info, + ), + self.enable_advanced_site_search: gapic_v1.method.wrap_method( + self.enable_advanced_site_search, + default_timeout=None, + client_info=client_info, + ), + self.disable_advanced_site_search: gapic_v1.method.wrap_method( + self.disable_advanced_site_search, + default_timeout=None, + client_info=client_info, + ), self.recrawl_uris: gapic_v1.method.wrap_method( self.recrawl_uris, default_timeout=None, client_info=client_info, ), + self.batch_verify_target_sites: gapic_v1.method.wrap_method( + self.batch_verify_target_sites, + default_timeout=None, + client_info=client_info, + ), + self.fetch_domain_verification_status: gapic_v1.method.wrap_method( + self.fetch_domain_verification_status, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -145,6 +203,93 @@ def operations_client(self): """Return the client designed to process long-running 
operations.""" raise NotImplementedError() + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + Union[ + site_search_engine.SiteSearchEngine, + Awaitable[site_search_engine.SiteSearchEngine], + ], + ]: + raise NotImplementedError() + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], + Union[site_search_engine.TargetSite, Awaitable[site_search_engine.TargetSite]], + ]: + raise NotImplementedError() + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + Union[ + site_search_engine_service.ListTargetSitesResponse, + Awaitable[site_search_engine_service.ListTargetSitesResponse], + ], + ]: + raise NotImplementedError() + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + 
@property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def recrawl_uris( self, @@ -154,6 +299,27 @@ def recrawl_uris( ]: raise NotImplementedError() + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + Union[ + site_search_engine_service.FetchDomainVerificationStatusResponse, + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py index 25060a361306..ab8ee337cb0b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc.py @@ -24,7 +24,10 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) from .base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport @@ -245,6 +248,273 @@ def operations_client(self) -> 
operations_v1.OperationsClient: # Return the client from cache. return self._operations_client + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + site_search_engine.SiteSearchEngine, + ]: + r"""Return a callable for the get site search engine method over gRPC. + + Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine]. + + Returns: + Callable[[~.GetSiteSearchEngineRequest], + ~.SiteSearchEngine]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site_search_engine" not in self._stubs: + self._stubs["get_site_search_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/GetSiteSearchEngine", + request_serializer=site_search_engine_service.GetSiteSearchEngineRequest.serialize, + response_deserializer=site_search_engine.SiteSearchEngine.deserialize, + ) + return self._stubs["get_site_search_engine"] + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create target site method over gRPC. + + Creates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + Returns: + Callable[[~.CreateTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_target_site" not in self._stubs: + self._stubs["create_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/CreateTargetSite", + request_serializer=site_search_engine_service.CreateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_target_site"] + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the batch create target sites method over gRPC. + + Creates + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] in + a batch. + + Returns: + Callable[[~.BatchCreateTargetSitesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_target_sites" not in self._stubs: + self._stubs["batch_create_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/BatchCreateTargetSites", + request_serializer=site_search_engine_service.BatchCreateTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_target_sites"] + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], site_search_engine.TargetSite + ]: + r"""Return a callable for the get target site method over gRPC. + + Gets a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + Returns: + Callable[[~.GetTargetSiteRequest], + ~.TargetSite]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_target_site" not in self._stubs: + self._stubs["get_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/GetTargetSite", + request_serializer=site_search_engine_service.GetTargetSiteRequest.serialize, + response_deserializer=site_search_engine.TargetSite.deserialize, + ) + return self._stubs["get_target_site"] + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update target site method over gRPC. + + Updates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + Returns: + Callable[[~.UpdateTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_target_site" not in self._stubs: + self._stubs["update_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/UpdateTargetSite", + request_serializer=site_search_engine_service.UpdateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_target_site"] + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete target site method over gRPC. + + Deletes a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. 
+ + Returns: + Callable[[~.DeleteTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_target_site" not in self._stubs: + self._stubs["delete_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/DeleteTargetSite", + request_serializer=site_search_engine_service.DeleteTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_target_site"] + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + site_search_engine_service.ListTargetSitesResponse, + ]: + r"""Return a callable for the list target sites method over gRPC. + + Gets a list of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s. + + Returns: + Callable[[~.ListTargetSitesRequest], + ~.ListTargetSitesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_target_sites" not in self._stubs: + self._stubs["list_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/ListTargetSites", + request_serializer=site_search_engine_service.ListTargetSitesRequest.serialize, + response_deserializer=site_search_engine_service.ListTargetSitesResponse.deserialize, + ) + return self._stubs["list_target_sites"] + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the enable advanced site search method over gRPC. + + Upgrade from basic site search to advanced site + search. + + Returns: + Callable[[~.EnableAdvancedSiteSearchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_advanced_site_search" not in self._stubs: + self._stubs["enable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/EnableAdvancedSiteSearch", + request_serializer=site_search_engine_service.EnableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_advanced_site_search"] + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the disable advanced site search method over gRPC. + + Downgrade from advanced site search to basic site + search. + + Returns: + Callable[[~.DisableAdvancedSiteSearchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_advanced_site_search" not in self._stubs: + self._stubs["disable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/DisableAdvancedSiteSearch", + request_serializer=site_search_engine_service.DisableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_advanced_site_search"] + @property def recrawl_uris( self, @@ -273,6 +543,71 @@ def recrawl_uris( ) return self._stubs["recrawl_uris"] + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the batch verify target sites method over gRPC. + + Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + Returns: + Callable[[~.BatchVerifyTargetSitesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_verify_target_sites" not in self._stubs: + self._stubs["batch_verify_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/BatchVerifyTargetSites", + request_serializer=site_search_engine_service.BatchVerifyTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_verify_target_sites"] + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + site_search_engine_service.FetchDomainVerificationStatusResponse, + ]: + r"""Return a callable for the fetch domain verification + status method over gRPC. + + Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + Returns: + Callable[[~.FetchDomainVerificationStatusRequest], + ~.FetchDomainVerificationStatusResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_domain_verification_status" not in self._stubs: + self._stubs[ + "fetch_domain_verification_status" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/FetchDomainVerificationStatus", + request_serializer=site_search_engine_service.FetchDomainVerificationStatusRequest.serialize, + response_deserializer=site_search_engine_service.FetchDomainVerificationStatusResponse.deserialize, + ) + return self._stubs["fetch_domain_verification_status"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py index 2250c40cf705..40254148b843 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/grpc_asyncio.py @@ -24,7 +24,10 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) from .base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport from .grpc import SiteSearchEngineServiceGrpcTransport @@ -250,6 +253,277 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Return the client from cache. return self._operations_client + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + Awaitable[site_search_engine.SiteSearchEngine], + ]: + r"""Return a callable for the get site search engine method over gRPC. 
+ + Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine]. + + Returns: + Callable[[~.GetSiteSearchEngineRequest], + Awaitable[~.SiteSearchEngine]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site_search_engine" not in self._stubs: + self._stubs["get_site_search_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/GetSiteSearchEngine", + request_serializer=site_search_engine_service.GetSiteSearchEngineRequest.serialize, + response_deserializer=site_search_engine.SiteSearchEngine.deserialize, + ) + return self._stubs["get_site_search_engine"] + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create target site method over gRPC. + + Creates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + Returns: + Callable[[~.CreateTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_target_site" not in self._stubs: + self._stubs["create_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/CreateTargetSite", + request_serializer=site_search_engine_service.CreateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_target_site"] + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the batch create target sites method over gRPC. + + Creates + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] in + a batch. + + Returns: + Callable[[~.BatchCreateTargetSitesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_target_sites" not in self._stubs: + self._stubs["batch_create_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/BatchCreateTargetSites", + request_serializer=site_search_engine_service.BatchCreateTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_target_sites"] + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], + Awaitable[site_search_engine.TargetSite], + ]: + r"""Return a callable for the get target site method over gRPC. + + Gets a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + Returns: + Callable[[~.GetTargetSiteRequest], + Awaitable[~.TargetSite]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_target_site" not in self._stubs: + self._stubs["get_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/GetTargetSite", + request_serializer=site_search_engine_service.GetTargetSiteRequest.serialize, + response_deserializer=site_search_engine.TargetSite.deserialize, + ) + return self._stubs["get_target_site"] + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update target site method over gRPC. + + Updates a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + + Returns: + Callable[[~.UpdateTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_target_site" not in self._stubs: + self._stubs["update_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/UpdateTargetSite", + request_serializer=site_search_engine_service.UpdateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_target_site"] + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete target site method over gRPC. + + Deletes a + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. 
+ + Returns: + Callable[[~.DeleteTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_target_site" not in self._stubs: + self._stubs["delete_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/DeleteTargetSite", + request_serializer=site_search_engine_service.DeleteTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_target_site"] + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + Awaitable[site_search_engine_service.ListTargetSitesResponse], + ]: + r"""Return a callable for the list target sites method over gRPC. + + Gets a list of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s. + + Returns: + Callable[[~.ListTargetSitesRequest], + Awaitable[~.ListTargetSitesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_target_sites" not in self._stubs: + self._stubs["list_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/ListTargetSites", + request_serializer=site_search_engine_service.ListTargetSitesRequest.serialize, + response_deserializer=site_search_engine_service.ListTargetSitesResponse.deserialize, + ) + return self._stubs["list_target_sites"] + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the enable advanced site search method over gRPC. + + Upgrade from basic site search to advanced site + search. + + Returns: + Callable[[~.EnableAdvancedSiteSearchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_advanced_site_search" not in self._stubs: + self._stubs["enable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/EnableAdvancedSiteSearch", + request_serializer=site_search_engine_service.EnableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_advanced_site_search"] + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the disable advanced site search method over gRPC. + + Downgrade from advanced site search to basic site + search. 
+ + Returns: + Callable[[~.DisableAdvancedSiteSearchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_advanced_site_search" not in self._stubs: + self._stubs["disable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/DisableAdvancedSiteSearch", + request_serializer=site_search_engine_service.DisableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_advanced_site_search"] + @property def recrawl_uris( self, @@ -279,6 +553,71 @@ def recrawl_uris( ) return self._stubs["recrawl_uris"] + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the batch verify target sites method over gRPC. + + Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + Returns: + Callable[[~.BatchVerifyTargetSitesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_verify_target_sites" not in self._stubs: + self._stubs["batch_verify_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/BatchVerifyTargetSites", + request_serializer=site_search_engine_service.BatchVerifyTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_verify_target_sites"] + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ]: + r"""Return a callable for the fetch domain verification + status method over gRPC. + + Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + Returns: + Callable[[~.FetchDomainVerificationStatusRequest], + Awaitable[~.FetchDomainVerificationStatusResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_domain_verification_status" not in self._stubs: + self._stubs[ + "fetch_domain_verification_status" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.SiteSearchEngineService/FetchDomainVerificationStatus", + request_serializer=site_search_engine_service.FetchDomainVerificationStatusRequest.serialize, + response_deserializer=site_search_engine_service.FetchDomainVerificationStatusResponse.deserialize, + ) + return self._stubs["fetch_domain_verification_status"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py index 083745187262..4bd602b0adcd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py @@ -45,7 +45,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import SiteSearchEngineServiceTransport @@ -72,6 +75,86 @@ class SiteSearchEngineServiceRestInterceptor: .. 
code-block:: python class MyCustomSiteSearchEngineServiceInterceptor(SiteSearchEngineServiceRestInterceptor): + def pre_batch_create_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_verify_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_verify_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_advanced_site_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_advanced_site_search(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_advanced_site_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_advanced_site_search(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_domain_verification_status(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_domain_verification_status(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_site_search_engine(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_site_search_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + def pre_recrawl_uris(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -80,30 +163,39 @@ def post_recrawl_uris(self, response): logging.log(f"Received response: {response}") return response + def pre_update_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_target_site(self, response): + logging.log(f"Received response: {response}") + return response + transport = SiteSearchEngineServiceRestTransport(interceptor=MyCustomSiteSearchEngineServiceInterceptor()) client = SiteSearchEngineServiceClient(transport=transport) """ - def pre_recrawl_uris( + def pre_batch_create_target_sites( self, - request: site_search_engine_service.RecrawlUrisRequest, + request: site_search_engine_service.BatchCreateTargetSitesRequest, metadata: Sequence[Tuple[str, str]], ) -> Tuple[ - site_search_engine_service.RecrawlUrisRequest, Sequence[Tuple[str, str]] + site_search_engine_service.BatchCreateTargetSitesRequest, + Sequence[Tuple[str, str]], ]: - """Pre-rpc interceptor for recrawl_uris + """Pre-rpc interceptor for batch_create_target_sites Override in a subclass to manipulate the request or metadata before they are sent to the SiteSearchEngineService server. 
""" return request, metadata - def post_recrawl_uris( + def post_batch_create_target_sites( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for recrawl_uris + """Post-rpc interceptor for batch_create_target_sites Override in a subclass to manipulate the response after it is returned by the SiteSearchEngineService server but before @@ -111,22 +203,25 @@ def post_recrawl_uris( """ return response - def pre_get_operation( + def pre_batch_verify_target_sites( self, - request: operations_pb2.GetOperationRequest, + request: site_search_engine_service.BatchVerifyTargetSitesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation + ) -> Tuple[ + site_search_engine_service.BatchVerifyTargetSitesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_verify_target_sites Override in a subclass to manipulate the request or metadata before they are sent to the SiteSearchEngineService server. 
""" return request, metadata - def post_get_operation( + def post_batch_verify_target_sites( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + """Post-rpc interceptor for batch_verify_target_sites Override in a subclass to manipulate the response after it is returned by the SiteSearchEngineService server but before @@ -134,22 +229,24 @@ def post_get_operation( """ return response - def pre_list_operations( + def pre_create_target_site( self, - request: operations_pb2.ListOperationsRequest, + request: site_search_engine_service.CreateTargetSiteRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations + ) -> Tuple[ + site_search_engine_service.CreateTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_target_site Override in a subclass to manipulate the request or metadata before they are sent to the SiteSearchEngineService server. 
""" return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_create_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_target_site Override in a subclass to manipulate the response after it is returned by the SiteSearchEngineService server but before @@ -157,123 +254,401 @@ def post_list_operations( """ return response + def pre_delete_target_site( + self, + request: site_search_engine_service.DeleteTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.DeleteTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_target_site -@dataclasses.dataclass -class SiteSearchEngineServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: SiteSearchEngineServiceRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + def post_delete_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_target_site -class SiteSearchEngineServiceRestTransport(SiteSearchEngineServiceTransport): - """REST backend transport for SiteSearchEngineService. + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response - Service for managing site search related resources. 
+ def pre_disable_advanced_site_search( + self, + request: site_search_engine_service.DisableAdvancedSiteSearchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for disable_advanced_site_search - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata - It sends JSON representations of protocol buffers over HTTP/1.1 + def post_disable_advanced_site_search( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_advanced_site_search - """ + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response - def __init__( + def pre_enable_advanced_site_search( self, - *, - host: str = "discoveryengine.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[SiteSearchEngineServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
+ request: site_search_engine_service.EnableAdvancedSiteSearchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for enable_advanced_site_search - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
+ def post_enable_advanced_site_search( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_advanced_site_search + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response - url_match_items = maybe_url_match.groupdict() + def pre_fetch_domain_verification_status( + self, + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.FetchDomainVerificationStatusRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for fetch_domain_verification_status - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. 
+ """ + return request, metadata - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or SiteSearchEngineServiceRestInterceptor() - self._prep_wrapped_messages(client_info) + def post_fetch_domain_verification_status( + self, response: site_search_engine_service.FetchDomainVerificationStatusResponse + ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: + """Post-rpc interceptor for fetch_domain_verification_status - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response - This property caches on the instance; repeated calls return the same - client. + def pre_get_site_search_engine( + self, + request: site_search_engine_service.GetSiteSearchEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.GetSiteSearchEngineRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_site_search_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", - }, - { - "method": "get", - "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", - }, + return request, metadata + + def post_get_site_search_engine( + self, response: site_search_engine.SiteSearchEngine + ) -> site_search_engine.SiteSearchEngine: + """Post-rpc interceptor for get_site_search_engine + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_get_target_site( + self, + request: site_search_engine_service.GetTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.GetTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_target_site( + self, response: site_search_engine.TargetSite + ) -> site_search_engine.TargetSite: + """Post-rpc interceptor for get_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_list_target_sites( + self, + request: site_search_engine_service.ListTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.ListTargetSitesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. 
+ """ + return request, metadata + + def post_list_target_sites( + self, response: site_search_engine_service.ListTargetSitesResponse + ) -> site_search_engine_service.ListTargetSitesResponse: + """Post-rpc interceptor for list_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_recrawl_uris( + self, + request: site_search_engine_service.RecrawlUrisRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.RecrawlUrisRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for recrawl_uris + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_recrawl_uris( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for recrawl_uris + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_update_target_site( + self, + request: site_search_engine_service.UpdateTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.UpdateTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_update_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SiteSearchEngineServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SiteSearchEngineServiceRestInterceptor + + +class SiteSearchEngineServiceRestTransport(SiteSearchEngineServiceTransport): + """REST backend transport for SiteSearchEngineService. + + Service for managing site search related resources. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SiteSearchEngineServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SiteSearchEngineServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", @@ -320,6 +695,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -375,21 +754,1038 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: ], } - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1alpha", + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _BatchCreateTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("BatchCreateTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.BatchCreateTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch create target sites method over HTTP. + + Args: + request (~.site_search_engine_service.BatchCreateTargetSitesRequest): + The request object. Request message for [SiteSearchEngineService.s][] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites:batchCreate", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites:batchCreate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_create_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.BatchCreateTargetSitesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_target_sites(resp) + return resp + + class _BatchVerifyTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("BatchVerifyTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.BatchVerifyTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch verify target sites method over HTTP. + + Args: + request (~.site_search_engine_service.BatchVerifyTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:batchVerifyTargetSites", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_verify_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_verify_target_sites(resp) + return resp + + class _CreateTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("CreateTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.CreateTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create target site method over HTTP. + + Args: + request (~.site_search_engine_service.CreateTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites", + "body": "target_site", + }, + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites", + "body": "target_site", + }, + ] + request, metadata = self._interceptor.pre_create_target_site( + request, metadata + ) + pb_request = site_search_engine_service.CreateTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_target_site(resp) + return resp + + class _DeleteTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("DeleteTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.DeleteTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete target site method over HTTP. + + Args: + request (~.site_search_engine_service.DeleteTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_target_site( + request, metadata + ) + pb_request = site_search_engine_service.DeleteTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target_site(resp) + return resp + + class _DisableAdvancedSiteSearch(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("DisableAdvancedSiteSearch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.DisableAdvancedSiteSearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the disable advanced site + search method over HTTP. + + Args: + request (~.site_search_engine_service.DisableAdvancedSiteSearchRequest): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:disableAdvancedSiteSearch", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:disableAdvancedSiteSearch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_advanced_site_search( + request, metadata + ) + pb_request = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_advanced_site_search(resp) + return resp + + class _EnableAdvancedSiteSearch(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("EnableAdvancedSiteSearch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.EnableAdvancedSiteSearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enable advanced site + search method over HTTP. + + Args: + request (~.site_search_engine_service.EnableAdvancedSiteSearchRequest): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:enableAdvancedSiteSearch", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:enableAdvancedSiteSearch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_advanced_site_search( + request, metadata + ) + pb_request = site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_advanced_site_search(resp) + return resp + + class _FetchDomainVerificationStatus(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("FetchDomainVerificationStatus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: + r"""Call the fetch domain verification + status method over HTTP. + + Args: + request (~.site_search_engine_service.FetchDomainVerificationStatusRequest): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine_service.FetchDomainVerificationStatusResponse: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:fetchDomainVerificationStatus", + }, + ] + request, metadata = self._interceptor.pre_fetch_domain_verification_status( + request, metadata + ) + pb_request = ( + site_search_engine_service.FetchDomainVerificationStatusRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine_service.FetchDomainVerificationStatusResponse() + pb_resp = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_domain_verification_status(resp) + return resp + + class _GetSiteSearchEngine(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("GetSiteSearchEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.GetSiteSearchEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Call the get site search engine method over HTTP. + + Args: + request (~.site_search_engine_service.GetSiteSearchEngineRequest): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetSiteSearchEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/siteSearchEngine}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}", + }, + ] + request, metadata = self._interceptor.pre_get_site_search_engine( + request, metadata + ) + pb_request = site_search_engine_service.GetSiteSearchEngineRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine.SiteSearchEngine() + pb_resp = site_search_engine.SiteSearchEngine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_site_search_engine(resp) + return resp + + class _GetTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("GetTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.GetTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Call the get target site method over HTTP. + + Args: + request (~.site_search_engine_service.GetTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine.TargetSite: + A target site for the + SiteSearchEngine. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + ] + request, metadata = self._interceptor.pre_get_target_site(request, metadata) + pb_request = site_search_engine_service.GetTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine.TargetSite() + pb_resp = site_search_engine.TargetSite.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_target_site(resp) + return resp + + class _ListTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("ListTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.ListTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine_service.ListTargetSitesResponse: + r"""Call the list target sites method over HTTP. + + Args: + request (~.site_search_engine_service.ListTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine_service.ListTargetSitesResponse: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites", + }, + ] + request, metadata = self._interceptor.pre_list_target_sites( + request, metadata ) + pb_request = site_search_engine_service.ListTargetSitesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) ) + query_params.update(self._get_unset_required_fields(query_params)) - # Return the client from cache. - return self._operations_client + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine_service.ListTargetSitesResponse() + pb_resp = site_search_engine_service.ListTargetSitesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_target_sites(resp) + return resp class _RecrawlUris(SiteSearchEngineServiceRestStub): def __hash__(self): @@ -494,6 +1890,218 @@ def __call__( resp = self._interceptor.post_recrawl_uris(resp) return resp + class _UpdateTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("UpdateTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.UpdateTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update target site method over HTTP. + + Args: + request (~.site_search_engine_service.UpdateTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{target_site.name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + "body": "target_site", + }, + { + "method": "patch", + "uri": "/v1alpha/{target_site.name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + "body": "target_site", + }, + ] + request, metadata = self._interceptor.pre_update_target_site( + request, metadata + ) + pb_request = site_search_engine_service.UpdateTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_target_site(resp) + return resp + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchVerifyTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableAdvancedSiteSearch(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableAdvancedSiteSearch(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + site_search_engine_service.FetchDomainVerificationStatusResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchDomainVerificationStatus(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + site_search_engine.SiteSearchEngine, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSiteSearchEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], site_search_engine.TargetSite + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + site_search_engine_service.ListTargetSitesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTargetSites(self._session, self._host, self._interceptor) # type: ignore + @property def recrawl_uris( self, @@ -504,6 +2112,16 @@ def recrawl_uris( # In C++ this would require a dynamic_cast return self._RecrawlUris(self._session, self._host, self._interceptor) # type: ignore + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTargetSite(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -533,6 +2151,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -647,6 +2269,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py index b7cd810b5d03..cd67558e89af 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py @@ -364,6 +364,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -418,6 +422,10 @@ def operations_client(self) -> 
operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1009,6 +1017,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1123,6 +1135,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py index 155ada9e8c6c..5bf045fb70bc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py @@ -110,10 +110,40 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .search_tuning_service import ( + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) +from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + 
CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + ListTargetSitesRequest, + ListTargetSitesResponse, RecrawlUrisMetadata, RecrawlUrisRequest, RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, ) from .user_event import ( CompletionInfo, @@ -211,9 +241,39 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", + "SiteSearchEngine", + "SiteVerificationInfo", + "TargetSite", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "CreateTargetSiteMetadata", + "CreateTargetSiteRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", + "GetSiteSearchEngineRequest", + "GetTargetSiteRequest", + "ListTargetSitesRequest", + "ListTargetSitesResponse", "RecrawlUrisMetadata", "RecrawlUrisRequest", "RecrawlUrisResponse", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "CompletionInfo", "DocumentInfo", "MediaInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py index 084e01693e8f..ebc7cfd1d7bd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversation.py @@ -41,6 +41,8 @@ class Conversation(proto.Message): name (str): Immutable. Fully qualified name ``project/*/locations/global/collections/{collection}/dataStore/*/conversations/*`` + or + ``project/*/locations/global/collections/{collection}/engines/*/conversations/*``. state (google.cloud.discoveryengine_v1alpha.types.Conversation.State): The state of the Conversation. user_pseudo_id (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py index 081efc3ff081..3eb6eb4a9944 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/conversational_search_service.py @@ -90,6 +90,26 @@ class ConverseConversationRequest(proto.Message): summary_spec (google.cloud.discoveryengine_v1alpha.types.SearchRequest.ContentSearchSpec.SummarySpec): A specification for configuring the summary returned in the response. + filter (str): + The filter syntax consists of an expression language for + constructing a predicate from one or more fields of the + documents being filtered. Filter expression is + case-sensitive. This will be used to filter search results + which may affect the summary response. + + If this field is unrecognizable, an ``INVALID_ARGUMENT`` is + returned. 
+ + Filtering in Vertex AI Search is done by mapping the LHS + filter key to a key property defined in the Vertex AI Search + backend -- this mapping is defined by the customer in their + schema. For example a media customer might have a field + 'name' in their schema. In this case the filter would look + like this: filter --> name:'ANY("king kong")' + + For more information about filtering including syntax and + filter operators, see + `Filter `__ """ name: str = proto.Field( @@ -126,6 +146,10 @@ class ConverseConversationRequest(proto.Message): message=search_service.SearchRequest.ContentSearchSpec.SummarySpec, ) ) + filter: str = proto.Field( + proto.STRING, + number=9, + ) class ConverseConversationResponse(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py index da951b8f75b0..e0963cd425f6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py @@ -111,7 +111,8 @@ class Content(proto.Message): uri (str): The URI of the content. Only Cloud Storage URIs (e.g. ``gs://bucket-name/path/to/file``) are supported. The - maximum file size is 100 MB. + maximum file size is 2.5 MB for text-based formats, 100 MB + for other formats. This field is a member of `oneof`_ ``content``. 
mime_type (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py index 7a80a3dbe6d5..4ad459268c89 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py @@ -104,6 +104,20 @@ class SearchRequest(proto.Message): For more information about filtering including syntax and filter operators, see `Filter `__ + canonical_filter (str): + The default filter that is applied when a user performs a + search without checking any filters on the search page. + + The filter applied to every search request when quality + improvement such as query expansion is needed. In the case a + query does not have a sufficient amount of results this + filter will be used to determine whether or not to enable + the query expansion flow. The original filter will still be + used for the query expanded search. This field is strongly + recommended to achieve high search quality. + + For more information about filter syntax, see + [SearchRequest.filter][google.cloud.discoveryengine.v1alpha.SearchRequest.filter]. order_by (str): The order in which documents are returned. Documents can be ordered by a field in an @@ -706,12 +720,47 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + model_prompt_spec (google.cloud.discoveryengine_v1alpha.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): + If specified, the spec will be used to modify + the prompt provided to the LLM. language_code (str): Language code for Summary. Use language tags defined by `BCP47 `__. Note: This is an experimental feature. 
+ model_spec (google.cloud.discoveryengine_v1alpha.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec): + If specified, the spec will be used to modify + the model specification provided to the LLM. """ + class ModelPromptSpec(proto.Message): + r"""Specification of the prompt to use with the model. + + Attributes: + preamble (str): + Text at the beginning of the prompt that + instructs the assistant. Examples are available + in the user guide. + """ + + preamble: str = proto.Field( + proto.STRING, + number=1, + ) + + class ModelSpec(proto.Message): + r"""Specification of the model. + + Attributes: + version (str): + The string format of the model version. + e.g. stable, preview, etc. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + summary_result_count: int = proto.Field( proto.INT32, number=1, @@ -728,10 +777,22 @@ class SummarySpec(proto.Message): proto.BOOL, number=4, ) + model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( + proto.MESSAGE, + number=5, + message="SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec", + ) language_code: str = proto.Field( proto.STRING, number=6, ) + model_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec" = ( + proto.Field( + proto.MESSAGE, + number=7, + message="SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec", + ) + ) class ExtractiveContentSpec(proto.Message): r"""A specification for configuring the extractive content in a @@ -897,6 +958,10 @@ class EmbeddingVector(proto.Message): proto.STRING, number=7, ) + canonical_filter: str = proto.Field( + proto.STRING, + number=29, + ) order_by: str = proto.Field( proto.STRING, number=8, @@ -1180,6 +1245,8 @@ class Summary(proto.Message): safety_attributes (google.cloud.discoveryengine_v1alpha.types.SearchResponse.Summary.SafetyAttributes): A collection of Safety Attribute categories and their associated confidence scores. 
+ summary_with_metadata (google.cloud.discoveryengine_v1alpha.types.SearchResponse.Summary.SummaryWithMetadata): + """ class SummarySkippedReason(proto.Enum): @@ -1252,6 +1319,125 @@ class SafetyAttributes(proto.Message): number=2, ) + class CitationMetadata(proto.Message): + r"""Citation metadata. + + Attributes: + citations (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchResponse.Summary.Citation]): + Citations for segments. + """ + + citations: MutableSequence[ + "SearchResponse.Summary.Citation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SearchResponse.Summary.Citation", + ) + + class Citation(proto.Message): + r"""Citation info for a segment. + + Attributes: + start_index (int): + Index indicates the start of the segment, + measured in bytes/unicode. + end_index (int): + End of the attributed segment, exclusive. + sources (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchResponse.Summary.CitationSource]): + Citation sources for the attributed segment. + """ + + start_index: int = proto.Field( + proto.INT64, + number=1, + ) + end_index: int = proto.Field( + proto.INT64, + number=2, + ) + sources: MutableSequence[ + "SearchResponse.Summary.CitationSource" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SearchResponse.Summary.CitationSource", + ) + + class CitationSource(proto.Message): + r"""Citation source. + + Attributes: + reference_index (int): + Document reference index from + SummaryWithMetadata.references. It is 0-indexed and the + value will be zero if the reference_index is not set + explicitly. + """ + + reference_index: int = proto.Field( + proto.INT64, + number=4, + ) + + class Reference(proto.Message): + r"""Document reference. + + Attributes: + title (str): + Title of the document. + document (str): + Required. + [Document.name][google.cloud.discoveryengine.v1alpha.Document.name] + of the document. 
Full resource name of the referenced + document, in the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + uri (str): + GCS or HTTP uri for the document. + """ + + title: str = proto.Field( + proto.STRING, + number=1, + ) + document: str = proto.Field( + proto.STRING, + number=2, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + ) + + class SummaryWithMetadata(proto.Message): + r"""Summary with metadata information. + + Attributes: + summary (str): + Summary text with no citation information. + citation_metadata (google.cloud.discoveryengine_v1alpha.types.SearchResponse.Summary.CitationMetadata): + Citation metadata for given summary. + references (MutableSequence[google.cloud.discoveryengine_v1alpha.types.SearchResponse.Summary.Reference]): + Document References. + """ + + summary: str = proto.Field( + proto.STRING, + number=1, + ) + citation_metadata: "SearchResponse.Summary.CitationMetadata" = proto.Field( + proto.MESSAGE, + number=2, + message="SearchResponse.Summary.CitationMetadata", + ) + references: MutableSequence[ + "SearchResponse.Summary.Reference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SearchResponse.Summary.Reference", + ) + summary_text: str = proto.Field( proto.STRING, number=1, @@ -1268,6 +1454,13 @@ class SafetyAttributes(proto.Message): number=3, message="SearchResponse.Summary.SafetyAttributes", ) + summary_with_metadata: "SearchResponse.Summary.SummaryWithMetadata" = ( + proto.Field( + proto.MESSAGE, + number=4, + message="SearchResponse.Summary.SummaryWithMetadata", + ) + ) class GeoSearchDebugInfo(proto.Message): r"""Debug information specifically related to forward geocoding diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py new file mode 100644 index 000000000000..5207f5fbf0c7 --- /dev/null 
+++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_tuning_service.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1alpha.types import import_config + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1alpha", + manifest={ + "TrainCustomModelRequest", + "TrainCustomModelResponse", + "TrainCustomModelMetadata", + }, +) + + +class TrainCustomModelRequest(proto.Message): + r"""Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel] + method. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_training_input (google.cloud.discoveryengine_v1alpha.types.TrainCustomModelRequest.GcsTrainingInput): + Gcs training input. + + This field is a member of `oneof`_ ``training_input``. + data_store (str): + Required. The resource name of the Data Store, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. 
+ This field is used to identify the data store where to train + the models. + model_type (str): + Model to be trained. Supported values are: + + - **search-tuning**: Fine tuning the search system based on + data provided. + error_config (google.cloud.discoveryengine_v1alpha.types.ImportErrorConfig): + The desired location of errors incurred + during the data ingestion and training. + """ + + class GcsTrainingInput(proto.Message): + r"""Gcs training data input. + + Attributes: + corpus_data_path (str): + The gcs corpus data which could be associated in train data. + The data path format is + gs:///. A newline delimited + jsonl/ndjson file. + + - For search-tuning model, each line should have the \_id, + title and text. Example: {"_id": "doc1", title: "relevant + doc", "text": "relevant text"} + query_data_path (str): + The gcs query data which could be associated in train data. + The data path format is + gs:///. A newline delimited + jsonl/ndjson file. + + - For search-tuning model, each line should have the \_id + and text. Example: {"_id": "query1", "text": "example + query"} + train_data_path (str): + Gcs training data path whose format should be + gs:///. The file should be in + tsv format. Each line should have the doc_id and query_id + and score (number). + + - For search-tuning model, it should have the query-id + corpus-id score as tsv file header. The score should be a + number in [0, inf+). The larger the number is, the more + relevant the pair is. Example: query-id\tcorpus-id\tscore + query1\tdoc1\t1 + test_data_path (str): + Gcs test data. Same format as train_data_path. If not + provided, a random 80/20 train/test split will be performed + on train_data_path. 
+ """ + + corpus_data_path: str = proto.Field( + proto.STRING, + number=1, + ) + query_data_path: str = proto.Field( + proto.STRING, + number=2, + ) + train_data_path: str = proto.Field( + proto.STRING, + number=3, + ) + test_data_path: str = proto.Field( + proto.STRING, + number=4, + ) + + gcs_training_input: GcsTrainingInput = proto.Field( + proto.MESSAGE, + number=2, + oneof="training_input", + message=GcsTrainingInput, + ) + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + model_type: str = proto.Field( + proto.STRING, + number=3, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=4, + message=import_config.ImportErrorConfig, + ) + + +class TrainCustomModelResponse(proto.Message): + r"""Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1alpha.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the data. + error_config (google.cloud.discoveryengine_v1alpha.types.ImportErrorConfig): + Echoes the destination for the complete + errors in the request if set. + model_status (str): + The trained model status. Possible values are: + + - **bad-data**: The training data quality is bad. + - **no-improvement**: Tuning didn't improve performance. + Won't deploy. + - **in-progress**: Model training is in progress. + - **ready**: The model is ready for serving. 
+ """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=2, + message=import_config.ImportErrorConfig, + ) + model_status: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TrainCustomModelMetadata(proto.Message): + r"""Metadata related to the progress of the TrainCustomModel + operation. This is returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py new file mode 100644 index 000000000000..600e97c5ead4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1alpha", + manifest={ + "SiteSearchEngine", + "TargetSite", + "SiteVerificationInfo", + }, +) + + +class SiteSearchEngine(proto.Message): + r"""SiteSearchEngine captures DataStore level site search + persisting configurations. It is a singleton value per data + store. + + Attributes: + name (str): + The fully qualified resource name of the site search engine. + Format: + ``projects/*/locations/*/dataStores/*/siteSearchEngine`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TargetSite(proto.Message): + r"""A target site for the SiteSearchEngine. + + Attributes: + name (str): + Output only. The fully qualified resource name of the target + site. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}`` + The ``target_site_id`` is system-generated. + provided_uri_pattern (str): + Required. Input only. The user provided URI pattern from + which the ``generated_uri_pattern`` is generated. + type_ (google.cloud.discoveryengine_v1alpha.types.TargetSite.Type): + The type of the target site, e.g. whether the + site is to be included or excluded. + exact_match (bool): + Input only. If set to false, a uri_pattern is generated to + include all pages whose address contains the + provided_uri_pattern. 
If set to true, an uri_pattern is + generated to try to be an exact match of the + provided_uri_pattern or just the specific page if the + provided_uri_pattern is a specific one. provided_uri_pattern + is always normalized to generate the URI pattern to be used + by the search engine. + generated_uri_pattern (str): + Output only. This is system-generated based on the + provided_uri_pattern. + site_verification_info (google.cloud.discoveryengine_v1alpha.types.SiteVerificationInfo): + Output only. Site ownership and validity + verification status. + indexing_status (google.cloud.discoveryengine_v1alpha.types.TargetSite.IndexingStatus): + Output only. Indexing status. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The target site's last updated + time. + failure_reason (google.cloud.discoveryengine_v1alpha.types.TargetSite.FailureReason): + Output only. Failure reason. + """ + + class Type(proto.Enum): + r"""Possible target site types. + + Values: + TYPE_UNSPECIFIED (0): + This value is unused. In this case, server behavior defaults + to + [Type.INCLUDE][google.cloud.discoveryengine.v1alpha.TargetSite.Type.INCLUDE]. + INCLUDE (1): + Include the target site. + EXCLUDE (2): + Exclude the target site. + """ + TYPE_UNSPECIFIED = 0 + INCLUDE = 1 + EXCLUDE = 2 + + class IndexingStatus(proto.Enum): + r"""Target site indexing status enumeration. + + Values: + INDEXING_STATUS_UNSPECIFIED (0): + Defaults to SUCCEEDED. + PENDING (1): + The target site is in the update queue and + will be picked up by indexing pipeline. + FAILED (2): + The target site fails to be indexed. + SUCCEEDED (3): + The target site has been indexed. + DELETING (4): + The previously indexed target site has been + marked to be deleted. This is a transitioning + state which will resulted in either: + + 1. target site deleted if unindexing is + successful; + 2. state reverts to SUCCEEDED if the unindexing + fails. 
+ """ + INDEXING_STATUS_UNSPECIFIED = 0 + PENDING = 1 + FAILED = 2 + SUCCEEDED = 3 + DELETING = 4 + + class FailureReason(proto.Message): + r"""Site search indexing failure reasons. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + quota_failure (google.cloud.discoveryengine_v1alpha.types.TargetSite.FailureReason.QuotaFailure): + Failed due to insufficient quota. + + This field is a member of `oneof`_ ``failure``. + """ + + class QuotaFailure(proto.Message): + r""" """ + + quota_failure: "TargetSite.FailureReason.QuotaFailure" = proto.Field( + proto.MESSAGE, + number=1, + oneof="failure", + message="TargetSite.FailureReason.QuotaFailure", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + provided_uri_pattern: str = proto.Field( + proto.STRING, + number=2, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + exact_match: bool = proto.Field( + proto.BOOL, + number=6, + ) + generated_uri_pattern: str = proto.Field( + proto.STRING, + number=4, + ) + site_verification_info: "SiteVerificationInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="SiteVerificationInfo", + ) + indexing_status: IndexingStatus = proto.Field( + proto.ENUM, + number=8, + enum=IndexingStatus, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + failure_reason: FailureReason = proto.Field( + proto.MESSAGE, + number=9, + message=FailureReason, + ) + + +class SiteVerificationInfo(proto.Message): + r"""Verification information for target sites in advanced site + search. + + Attributes: + site_verification_state (google.cloud.discoveryengine_v1alpha.types.SiteVerificationInfo.SiteVerificationState): + Site verification state indicating the + ownership and validity. + verify_time (google.protobuf.timestamp_pb2.Timestamp): + Latest site verification time. 
+ """ + + class SiteVerificationState(proto.Enum): + r"""Site verification state. + + Values: + SITE_VERIFICATION_STATE_UNSPECIFIED (0): + Defaults to VERIFIED. + VERIFIED (1): + Site ownership verified. + UNVERIFIED (2): + Site ownership pending verification or + verification failed. + EXEMPTED (3): + Site exempt from verification, e.g. a public + website that opens to all. + """ + SITE_VERIFICATION_STATE_UNSPECIFIED = 0 + VERIFIED = 1 + UNVERIFIED = 2 + EXEMPTED = 3 + + site_verification_state: SiteVerificationState = proto.Field( + proto.ENUM, + number=1, + enum=SiteVerificationState, + ) + verify_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine_service.py index 22221948bbef..73a6f3298b8d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/site_search_engine_service.py @@ -20,16 +20,516 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine as gcd_site_search_engine, +) + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1alpha", manifest={ + "GetSiteSearchEngineRequest", + "CreateTargetSiteRequest", + "CreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "GetTargetSiteRequest", + "UpdateTargetSiteRequest", + "UpdateTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DeleteTargetSiteMetadata", + "ListTargetSitesRequest", + "ListTargetSitesResponse", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesResponse", + 
"EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "DisableAdvancedSiteSearchMetadata", "RecrawlUrisRequest", "RecrawlUrisResponse", "RecrawlUrisMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", }, ) +class GetSiteSearchEngineRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetSiteSearchEngine] + method. + + Attributes: + name (str): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it exists, + a PERMISSION_DENIED error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite] + method. + + Attributes: + parent (str): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + target_site (google.cloud.discoveryengine_v1alpha.types.TargetSite): + Required. The + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + target_site: gcd_site_search_engine.TargetSite = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_site_search_engine.TargetSite, + ) + + +class CreateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BatchCreateTargetSitesRequest(proto.Message): + r"""Request message for [SiteSearchEngineService.s][] method. + + Attributes: + parent (str): + Required. The parent resource shared by all TargetSites + being created. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + The parent field in the CreateBookRequest messages must + either be empty or match this field. + requests (MutableSequence[google.cloud.discoveryengine_v1alpha.types.CreateTargetSiteRequest]): + Required. The request message specifying the + resources to create. A maximum of 20 TargetSites + can be created in a batch. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateTargetSiteRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateTargetSiteRequest", + ) + + +class GetTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetTargetSite] + method. + + Attributes: + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite] + method. + + Attributes: + target_site (google.cloud.discoveryengine_v1alpha.types.TargetSite): + Required. The target site to update. If the caller does not + have permission to update the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + to update does not exist, a NOT_FOUND error is returned. 
+ """ + + target_site: gcd_site_search_engine.TargetSite = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + + +class UpdateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite] + method. + + Attributes: + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite] + does not exist, a NOT_FOUND error is returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + + Attributes: + parent (str): + Required. The parent site search engine resource name, such + as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]s + under this site search engine, regardless of whether or not + this branch exists, a PERMISSION_DENIED error is returned. + page_size (int): + Requested page size. Server may return fewer items than + requested. If unspecified, server will pick an appropriate + default. The maximum value is 1000; values above 1000 will + be coerced to 1000. + + If this field is negative, an INVALID_ARGUMENT error is + returned. + page_token (str): + A page token, received from a previous ``ListTargetSites`` + call. Provide this to retrieve the subsequent page. 
+ + When paginating, all other parameters provided to + ``ListTargetSites`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1alpha.types.TargetSite]): + List of TargetSites. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + The total number of items matching the + request. This will always be populated in the + response. + """ + + @property + def raw_page(self): + return self + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.BatchCreateTargetSite][] operation. This + will be returned by the google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BatchCreateTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchCreateTargetSites] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1alpha.types.TargetSite]): + TargetSites created. + """ + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + + +class EnableAdvancedSiteSearchRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/dataStores/{data_store_id}/siteSearchEngine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EnableAdvancedSiteSearchResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + + +class EnableAdvancedSiteSearchMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch] + operation. This will be returned by the + google.longrunning.Operation.metadata field. 
+ + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DisableAdvancedSiteSearchRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1alpha.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/dataStores/{data_store_id}/siteSearchEngine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DisableAdvancedSiteSearchResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + + +class DisableAdvancedSiteSearchMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class RecrawlUrisRequest(proto.Message): r"""Request message for [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.RecrawlUris] @@ -211,4 +711,138 @@ class RecrawlUrisMetadata(proto.Message): ) +class BatchVerifyTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + Attributes: + parent (str): + Required. The parent resource shared by all TargetSites + being verified. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BatchVerifyTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + + +class BatchVerifyTargetSitesMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class FetchDomainVerificationStatusRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Attributes: + site_search_engine (str): + Required. The site search engine resource under which we + fetch all the domain verification status. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + page_size (int): + Requested page size. Server may return fewer items than + requested. If unspecified, server will pick an appropriate + default. The maximum value is 1000; values above 1000 will + be coerced to 1000. + + If this field is negative, an INVALID_ARGUMENT error is + returned. + page_token (str): + A page token, received from a previous + ``FetchDomainVerificationStatus`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters provided to + ``FetchDomainVerificationStatus`` must match the call that + provided the page token. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchDomainVerificationStatusResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1alpha.types.TargetSite]): + List of TargetSites containing the site + verification status. 
+ next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + The total number of items matching the + request. This will always be populated in the + response. + """ + + @property + def raw_page(self): + return self + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index 977b515991ee..768cd2103fa5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.4" # {x-release-please-version} +__version__ = "0.11.5" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_async.py new file mode 100644 index 000000000000..27b9c745fd0e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SearchTuningService_TrainCustomModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SearchTuningService_TrainCustomModel_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_sync.py new file mode 100644 index 000000000000..8bc3fc6b68dd --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SearchTuningService_TrainCustomModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1alpha.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SearchTuningService_TrainCustomModel_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_async.py new file mode 100644 index 000000000000..1d10623727eb --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_async.py @@ -0,0 +1,61 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchCreateTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + requests = discoveryengine_v1alpha.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchCreateTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_sync.py new file mode 100644 index 000000000000..a34df547ccc1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchCreateTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + requests = discoveryengine_v1alpha.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchCreateTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_async.py new file mode 100644 index 000000000000..0e1a9ac72a6b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchVerifyTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchVerifyTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchVerifyTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_sync.py new file mode 100644 index 000000000000..c96d16fea749 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchVerifyTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_async.py new file mode 100644 index 000000000000..077db01940cd --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_CreateTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_create_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_CreateTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_sync.py new file mode 100644 index 000000000000..9057e3c3c5d8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_CreateTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_create_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_CreateTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_async.py new file mode 100644 index 000000000000..7807560838bf --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_DeleteTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_DeleteTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_sync.py new file mode 100644 index 000000000000..1bd675059d5a --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_DeleteTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_DeleteTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_async.py new file mode 100644 index 000000000000..80676647e07c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for DisableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_sync.py new file mode 100644 index 000000000000..b6d4a7c34848 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_async.py new file mode 100644 index 000000000000..198999ff32df --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_sync.py new file mode 100644 
index 000000000000..3d933c01d64b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_async.py new file mode 100644 index 000000000000..8a6dce4641b8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDomainVerificationStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_sync.py new file mode 100644 index 
000000000000..345ffcaeead3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDomainVerificationStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_async.py new file mode 100644 index 000000000000..28f84d28595a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSiteSearchEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_GetSiteSearchEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site_search_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_GetSiteSearchEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_sync.py new file mode 100644 index 000000000000..d4f2f5c03df1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 
2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSiteSearchEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_GetSiteSearchEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_site_search_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_GetSiteSearchEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_async.py new file mode 100644 index 000000000000..10f26abbe6aa --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_GetTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_get_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_target_site(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_GetTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_sync.py new file mode 100644 index 000000000000..d8dfc1e24e56 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_GetTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_get_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_target_site(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_GetTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_async.py new file mode 100644 index 000000000000..166017ba4d16 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_ListTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_ListTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_sync.py new file mode 100644 index 000000000000..dbb6d7a19b5f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_ListTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_ListTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_async.py new file mode 100644 index 000000000000..c932840dd90d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_UpdateTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_update_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_UpdateTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_sync.py new file mode 100644 index 000000000000..38e250eca0a1 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_SiteSearchEngineService_UpdateTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_update_target_site(): + # Create a client + client = discoveryengine_v1alpha.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1alpha.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1alpha.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_SiteSearchEngineService_UpdateTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 3cc5fd41fe99..62400ff6ec43 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.11.4" + "version": "0.11.5" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 8a43aa06f2ee..67d077989daf 100644 --- 
a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.11.4" + "version": "0.11.5" }, "snippets": [ { @@ -5521,6 +5521,1737 @@ ], "title": "discoveryengine_v1alpha_generated_search_service_search_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceAsyncClient.train_custom_model", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService", + "shortName": "SearchTuningService" + }, + "shortName": "TrainCustomModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.TrainCustomModelRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "train_custom_model" + }, + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SearchTuningService_TrainCustomModel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SearchTuningServiceClient.train_custom_model", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService.TrainCustomModel", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SearchTuningService", + "shortName": "SearchTuningService" + }, + "shortName": "TrainCustomModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.TrainCustomModelRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "train_custom_model" + }, + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SearchTuningService_TrainCustomModel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_search_tuning_service_train_custom_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_create_target_sites_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": 
"SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.create_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "CreateTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.CreateTargetSiteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1alpha.types.TargetSite" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_target_site" + }, + "description": "Sample for CreateTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_CreateTargetSite_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": 
"SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.create_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.CreateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "CreateTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.CreateTargetSiteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1alpha.types.TargetSite" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_target_site" + }, + "description": "Sample for CreateTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_CreateTargetSite_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_create_target_site_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": 
"SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.delete_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "DeleteTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.DeleteTargetSiteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_target_site" + }, + "description": "Sample for DeleteTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_DeleteTargetSite_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": 
"google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.delete_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DeleteTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "DeleteTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.DeleteTargetSiteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_target_site" + }, + "description": "Sample for DeleteTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_DeleteTargetSite_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_delete_target_site_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.disable_advanced_site_search", + "method": { + 
"fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "DisableAdvancedSiteSearch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.DisableAdvancedSiteSearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "disable_advanced_site_search" + }, + "description": "Sample for DisableAdvancedSiteSearch", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.disable_advanced_site_search", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.DisableAdvancedSiteSearch", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "DisableAdvancedSiteSearch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.DisableAdvancedSiteSearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "disable_advanced_site_search" + }, + "description": "Sample for DisableAdvancedSiteSearch", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_disable_advanced_site_search_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.enable_advanced_site_search", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "EnableAdvancedSiteSearch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.EnableAdvancedSiteSearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "enable_advanced_site_search" + }, + "description": "Sample for EnableAdvancedSiteSearch", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.enable_advanced_site_search", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.EnableAdvancedSiteSearch", + 
"service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "EnableAdvancedSiteSearch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.EnableAdvancedSiteSearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "enable_advanced_site_search" + }, + "description": "Sample for EnableAdvancedSiteSearch", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_enable_advanced_site_search_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.fetch_domain_verification_status", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus", + "service": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "FetchDomainVerificationStatus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.FetchDomainVerificationStatusAsyncPager", + "shortName": "fetch_domain_verification_status" + }, + "description": "Sample for FetchDomainVerificationStatus", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.fetch_domain_verification_status", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.FetchDomainVerificationStatus", + 
"service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "FetchDomainVerificationStatus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.FetchDomainVerificationStatusRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.FetchDomainVerificationStatusPager", + "shortName": "fetch_domain_verification_status" + }, + "description": "Sample for FetchDomainVerificationStatus", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_fetch_domain_verification_status_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.get_site_search_engine", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetSiteSearchEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "GetSiteSearchEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.GetSiteSearchEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.SiteSearchEngine", + "shortName": "get_site_search_engine" + }, + "description": "Sample for GetSiteSearchEngine", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_GetSiteSearchEngine_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.get_site_search_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetSiteSearchEngine", + 
"service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "GetSiteSearchEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.GetSiteSearchEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.SiteSearchEngine", + "shortName": "get_site_search_engine" + }, + "description": "Sample for GetSiteSearchEngine", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_GetSiteSearchEngine_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_get_site_search_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.get_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetTargetSite", + "service": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "GetTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.GetTargetSiteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.TargetSite", + "shortName": "get_target_site" + }, + "description": "Sample for GetTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_GetTargetSite_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.get_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "GetTargetSite" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.GetTargetSiteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.TargetSite", + "shortName": "get_target_site" + }, + "description": "Sample for GetTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_GetTargetSite_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_get_target_site_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.list_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "ListTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.ListTargetSitesRequest" 
+ }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.ListTargetSitesAsyncPager", + "shortName": "list_target_sites" + }, + "description": "Sample for ListTargetSites", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_ListTargetSites_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.list_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.ListTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "ListTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.ListTargetSitesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.services.site_search_engine_service.pagers.ListTargetSitesPager", + "shortName": "list_target_sites" + }, + "description": "Sample for ListTargetSites", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_ListTargetSites_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_list_target_sites_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5674,6 +7405,167 @@ ], "title": "discoveryengine_v1alpha_generated_site_search_engine_service_recrawl_uris_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceAsyncClient.update_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "UpdateTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1alpha.types.UpdateTargetSiteRequest" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1alpha.types.TargetSite" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_target_site" + }, + "description": "Sample for UpdateTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_UpdateTargetSite_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.SiteSearchEngineServiceClient.update_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.UpdateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "UpdateTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.UpdateTargetSiteRequest" + }, + 
{ + "name": "target_site", + "type": "google.cloud.discoveryengine_v1alpha.types.TargetSite" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_target_site" + }, + "description": "Sample for UpdateTargetSite", + "file": "discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_SiteSearchEngineService_UpdateTargetSite_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_site_search_engine_service_update_target_site_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index d08cdacac9da..eab8b26480f3 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.11.4" + "version": "0.11.5" }, "snippets": [ { diff --git 
a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py index 36332ca4a1e0..7951e5979e8d 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py @@ -39,24 +39,33 @@ def partition( class discoveryengineCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_verify_target_sites': ('parent', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', 'include_tail_suggestions', ), - 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', ), + 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', ), 'create_conversation': ('parent', 'conversation', ), 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), 'create_document': ('parent', 'document', 'document_id', ), 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), + 'create_target_site': ('parent', 'target_site', ), 'delete_conversation': ('name', ), 'delete_data_store': ('name', ), 'delete_document': ('name', ), 'delete_engine': ('name', ), 'delete_schema': ('name', ), + 'delete_target_site': ('name', ), + 'disable_advanced_site_search': ('site_search_engine', ), + 'enable_advanced_site_search': ('site_search_engine', ), + 'fetch_domain_verification_status': ('site_search_engine', 'page_size', 'page_token', ), 'get_conversation': ('name', ), 'get_data_store': ('name', ), 'get_document': 
('name', ), 'get_engine': ('name', ), 'get_schema': ('name', ), + 'get_site_search_engine': ('name', ), + 'get_target_site': ('name', ), 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', 'reconciliation_mode', 'auto_generate_ids', 'id_field', ), 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), @@ -64,19 +73,22 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'list_documents': ('parent', 'page_size', 'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), 'list_schemas': ('parent', 'page_size', 'page_token', ), + 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'pause_engine': ('name', ), 'purge_documents': ('parent', 'filter', 'force', ), 'purge_user_events': ('parent', 'filter', 'force', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'resume_engine': ('name', ), - 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', ), + 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'filter', 'canonical_filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', ), + 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', ), 'tune_engine': ('name', ), 'update_conversation': ('conversation', 
'update_mask', ), 'update_data_store': ('data_store', 'update_mask', ), 'update_document': ('document', 'allow_missing', ), 'update_engine': ('engine', 'update_mask', ), 'update_schema': ('schema', 'allow_missing', ), + 'update_target_site': ('target_site', ), 'write_user_event': ('parent', 'user_event', ), } diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py index 33ffb9ddb02f..7b1a3a4ccd98 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py @@ -1847,7 +1847,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -1877,7 +1877,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1910,7 +1910,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -1940,7 +1940,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py index c283f4206165..f81db6e0b84f 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py @@ -2829,6 +2829,25 @@ def test_create_conversation_rest(request_type): "categories": ["categories_value1", "categories_value2"], "scores": [0.656, 0.657], }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + } + ], + }, }, }, "create_time": {"seconds": 751, "nanos": 543}, @@ -3493,6 +3512,25 @@ def test_update_conversation_rest(request_type): "categories": ["categories_value1", "categories_value2"], "scores": [0.656, 0.657], }, + "summary_with_metadata": { + "summary": 
"summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + } + ], + }, }, }, "create_time": {"seconds": 751, "nanos": 543}, @@ -5320,7 +5358,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -5350,7 +5388,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5383,7 +5421,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -5413,7 +5451,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index 2e6825c50037..a2c255b71a24 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -4645,7 +4645,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -4675,7 +4675,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4708,7 +4708,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -4738,7 +4738,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index 272e143cddab..c048d3b23306 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -5261,7 +5261,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -5291,7 +5291,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5324,7 +5324,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -5354,7 +5354,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py index 2a79c280a8dc..63a2b2ed9760 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py @@ -6172,7 +6172,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -6202,7 +6202,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -6235,7 +6235,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -6265,7 +6265,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py index b1036369a87a..87a4cd661ebb 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py @@ -1895,7 +1895,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -1925,7 +1925,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method 
and fake a response. @@ -1958,7 +1958,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -1988,7 +1988,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py index 04c0f0642987..6c0c212df2c8 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py @@ -4423,7 +4423,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -4453,7 +4453,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. @@ -4486,7 +4486,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -4516,7 +4516,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py index 74876e64b2b3..ed2dbd5e13f5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py @@ -2125,7 +2125,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -2155,7 +2155,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -2188,7 +2188,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -2218,7 +2218,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py new file mode 100644 index 000000000000..4892c2467183 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py @@ -0,0 +1,2369 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1alpha.services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, + transports, +) +from google.cloud.discoveryengine_v1alpha.types import ( + import_config, + search_tuning_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SearchTuningServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + 
"transport_class,transport_name", + [ + (transports.SearchTuningServiceGrpcTransport, "grpc"), + (transports.SearchTuningServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SearchTuningServiceRestTransport, "rest"), + ], +) +def test_search_tuning_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def 
test_search_tuning_service_client_get_transport_class(): + transport = SearchTuningServiceClient.get_transport_class() + available_transports = [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceRestTransport, + ] + assert transport in available_transports + + transport = SearchTuningServiceClient.get_transport_class("grpc") + assert transport == transports.SearchTuningServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "true", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "false", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_search_tuning_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test_search_tuning_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + None, + ), + ], +) +def test_search_tuning_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_search_tuning_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1alpha.services.search_tuning_service.transports.SearchTuningServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SearchTuningServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_search_tuning_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an 
empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_train_custom_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +@pytest.mark.asyncio +async def test_train_custom_model_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.TrainCustomModelRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_train_custom_model_async_from_dict(): + await test_train_custom_model_async(request_type=dict) + + +def test_train_custom_model_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_train_custom_model_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_custom_model(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_train_custom_model_rest_required_fields( + request_type=search_tuning_service.TrainCustomModelRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_custom_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_custom_model_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_custom_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_custom_model_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + 
) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.TrainCustomModelRequest.pb( + search_tuning_service.TrainCustomModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = search_tuning_service.TrainCustomModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_custom_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_custom_model_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_custom_model(request) + + +def test_train_custom_model_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SearchTuningServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SearchTuningServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SearchTuningServiceGrpcTransport, + ) + + +def test_search_tuning_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_search_tuning_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1alpha.services.search_tuning_service.transports.SearchTuningServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "train_custom_model", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_search_tuning_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1alpha.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_search_tuning_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1alpha.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport() + adc.assert_called_once() + + +def test_search_tuning_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SearchTuningServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_search_tuning_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SearchTuningServiceGrpcTransport, grpc_helpers), + (transports.SearchTuningServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_search_tuning_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_search_tuning_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SearchTuningServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_search_tuning_service_rest_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_no_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_with_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_search_tuning_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SearchTuningServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SearchTuningServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.train_custom_model._session + session2 = client2.transport.train_custom_model._session + assert session1 != session2 + + +def test_search_tuning_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SearchTuningServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_search_tuning_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_search_tuning_service_grpc_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # 
Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_search_tuning_service_grpc_lro_async_client(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_data_store_path(): + project = "squid" + location = "clam" + data_store = "whelk" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = SearchTuningServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = SearchTuningServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SearchTuningServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = SearchTuningServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SearchTuningServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = SearchTuningServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SearchTuningServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = SearchTuningServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = SearchTuningServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = SearchTuningServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SearchTuningServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = SearchTuningServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SearchTuningServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SearchTuningServiceClient, transports.SearchTuningServiceGrpcTransport), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py index 4eb2e5a01951..ba20e4c7ac1a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py @@ -44,7 +44,9 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # 
type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -56,9 +58,13 @@ from google.cloud.discoveryengine_v1alpha.services.site_search_engine_service import ( SiteSearchEngineServiceAsyncClient, SiteSearchEngineServiceClient, + pagers, transports, ) -from google.cloud.discoveryengine_v1alpha.types import site_search_engine_service +from google.cloud.discoveryengine_v1alpha.types import ( + site_search_engine, + site_search_engine_service, +) def client_cert_source_callback(): @@ -763,11 +769,11 @@ def test_site_search_engine_service_client_create_channel_credentials_file( @pytest.mark.parametrize( "request_type", [ - site_search_engine_service.RecrawlUrisRequest, + site_search_engine_service.GetSiteSearchEngineRequest, dict, ], ) -def test_recrawl_uris(request_type, transport: str = "grpc"): +def test_get_site_search_engine(request_type, transport: str = "grpc"): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -778,141 +784,5837 @@ def test_recrawl_uris(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine( + name="name_value", + ) + response = client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +def test_get_site_search_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + client.get_site_search_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + +@pytest.mark.asyncio +async def test_get_site_search_engine_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine( + name="name_value", + ) + ) + response = await client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_site_search_engine_async_from_dict(): + await test_get_site_search_engine_async(request_type=dict) + + +def test_get_site_search_engine_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetSiteSearchEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + call.return_value = site_search_engine.SiteSearchEngine() + client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_site_search_engine_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetSiteSearchEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine() + ) + await client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_site_search_engine_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_site_search_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_site_search_engine_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_site_search_engine_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_site_search_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_site_search_engine_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.CreateTargetSiteRequest, + dict, + ], +) +def test_create_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.recrawl_uris(request) + response = client.create_target_site(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == site_search_engine_service.RecrawlUrisRequest() + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + client.create_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_create_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_target_site_async_from_dict(): + await test_create_target_site_async(request_type=dict) + + +def test_create_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.CreateTargetSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.CreateTargetSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_target_site( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +def test_create_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_target_site( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchCreateTargetSitesRequest, + dict, + ], +) +def test_batch_create_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + client.batch_create_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_async_from_dict(): + await test_batch_create_target_sites_async(request_type=dict) + + +def test_batch_create_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchCreateTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchCreateTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetTargetSiteRequest, + dict, + ], +) +def test_get_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + response = client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +def test_get_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + client.get_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_get_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.GetTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + ) + response = await client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +@pytest.mark.asyncio +async def test_get_target_site_async_from_dict(): + await test_get_target_site_async(request_type=dict) + + +def test_get_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + call.return_value = site_search_engine.TargetSite() + client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite() + ) + await client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.UpdateTargetSiteRequest, + dict, + ], +) +def test_update_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + client.update_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_update_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_target_site_async_from_dict(): + await test_update_target_site_async(request_type=dict) + + +def test_update_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.UpdateTargetSiteRequest() + + request.target_site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "target_site.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.UpdateTargetSiteRequest() + + request.target_site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "target_site.name=name_value", + ) in kw["metadata"] + + +def test_update_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_target_site( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +def test_update_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_target_site( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DeleteTargetSiteRequest, + dict, + ], +) +def test_delete_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + client.delete_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_delete_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_target_site_async_from_dict(): + await test_delete_target_site_async(request_type=dict) + + +def test_delete_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DeleteTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DeleteTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.ListTargetSitesRequest, + dict, + ], +) +def test_list_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + client.list_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_list_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.ListTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_target_sites_async_from_dict(): + await test_list_target_sites_async(request_type=dict) + + +def test_list_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = site_search_engine_service.ListTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + call.return_value = site_search_engine_service.ListTargetSitesResponse() + client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.ListTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse() + ) + await client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_target_sites_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine_service.ListTargetSitesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_target_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_target_sites_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_target_sites_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = site_search_engine_service.ListTargetSitesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_target_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_target_sites_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +def test_list_target_sites_pager(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_target_sites(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + +def test_list_target_sites_pages(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = list(client.list_target_sites(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_target_sites_async_pager(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_target_sites( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in responses) + + +@pytest.mark.asyncio +async def test_list_target_sites_async_pages(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_target_sites(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + dict, + ], +) +def test_enable_advanced_site_search(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_enable_advanced_site_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + client.enable_advanced_site_search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_async_from_dict(): + await test_enable_advanced_site_search_async(request_type=dict) + + +def test_enable_advanced_site_search_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + dict, + ], +) +def test_disable_advanced_site_search(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_disable_advanced_site_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + client.disable_advanced_site_search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_async_from_dict(): + await test_disable_advanced_site_search_async(request_type=dict) + + +def test_disable_advanced_site_search_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.RecrawlUrisRequest, + dict, + ], +) +def test_recrawl_uris(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_recrawl_uris_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + client.recrawl_uris() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + +@pytest.mark.asyncio +async def test_recrawl_uris_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.RecrawlUrisRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_recrawl_uris_async_from_dict(): + await test_recrawl_uris_async(request_type=dict) + + +def test_recrawl_uris_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.RecrawlUrisRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_recrawl_uris_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.RecrawlUrisRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchVerifyTargetSitesRequest, + dict, + ], +) +def test_batch_verify_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_verify_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + client.batch_verify_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_async_from_dict(): + await test_batch_verify_target_sites_async(request_type=dict) + + +def test_batch_verify_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.BatchVerifyTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchVerifyTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.FetchDomainVerificationStatusRequest, + dict, + ], +) +def test_fetch_domain_verification_status(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchDomainVerificationStatusPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_fetch_domain_verification_status_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + client.fetch_domain_verification_status() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchDomainVerificationStatusAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_from_dict(): + await test_fetch_domain_verification_status_async(request_type=dict) + + +def test_fetch_domain_verification_status_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + call.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + await client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +def test_fetch_domain_verification_status_pager(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("site_search_engine", ""),)), + ) + pager = client.fetch_domain_verification_status(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + +def test_fetch_domain_verification_status_pages(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_domain_verification_status(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_pager(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_domain_verification_status( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_pages(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_domain_verification_status(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetSiteSearchEngineRequest, + dict, + ], +) +def test_get_site_search_engine_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.SiteSearchEngine( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_site_search_engine(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +def test_get_site_search_engine_rest_required_fields( + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site_search_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site_search_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine.SiteSearchEngine() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_site_search_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_site_search_engine_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_site_search_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_site_search_engine_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_get_site_search_engine" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_get_site_search_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.GetSiteSearchEngineRequest.pb( + site_search_engine_service.GetSiteSearchEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = site_search_engine.SiteSearchEngine.to_json( + site_search_engine.SiteSearchEngine() + ) + + request = site_search_engine_service.GetSiteSearchEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine.SiteSearchEngine() + + client.get_site_search_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_site_search_engine_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_site_search_engine(request) + + +def test_get_site_search_engine_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine.SiteSearchEngine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_site_search_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/siteSearchEngine}" + % client.transport._host, + args[1], + ) + + +def test_get_site_search_engine_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +def test_get_site_search_engine_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.CreateTargetSiteRequest, + dict, + ], +) +def test_create_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request_init["target_site"] = { + "name": "name_value", + "provided_uri_pattern": "provided_uri_pattern_value", + "type_": 1, + "exact_match": True, + "generated_uri_pattern": "generated_uri_pattern_value", + "site_verification_info": { + "site_verification_state": 1, + "verify_time": {"seconds": 751, "nanos": 543}, + }, + "indexing_status": 1, + "update_time": {}, + "failure_reason": {"quota_failure": {}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = site_search_engine_service.CreateTargetSiteRequest.meta.fields[ + "target_site" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target_site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_site"][field])): + del request_init["target_site"][field][i][subfield] + else: + del request_init["target_site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_target_site(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_target_site_rest_required_fields( + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_target_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "targetSite", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_create_target_site" + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, "pre_create_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.CreateTargetSiteRequest.pb( + site_search_engine_service.CreateTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.CreateTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_target_site(request) + + +def test_create_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites" + % client.transport._host, + args[1], + ) + + +def test_create_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +def test_create_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchCreateTargetSitesRequest, + dict, + ], +) +def test_batch_create_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_target_sites(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_create_target_sites_rest_required_fields( + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = 
SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_create_target_sites", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_batch_create_target_sites", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.BatchCreateTargetSitesRequest.pb( + site_search_engine_service.BatchCreateTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.BatchCreateTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_target_sites(request) + + +def test_batch_create_target_sites_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetTargetSiteRequest, + dict, + ], +) +def test_get_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_target_site(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +def test_get_target_site_rest_required_fields( + request_type=site_search_engine_service.GetTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine.TargetSite() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_get_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_get_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.GetTargetSiteRequest.pb( + site_search_engine_service.GetTargetSiteRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = site_search_engine.TargetSite.to_json( + site_search_engine.TargetSite() + ) + + request = site_search_engine_service.GetTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine.TargetSite() + + client.get_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.GetTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_target_site(request) + + +def test_get_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.TargetSite() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_get_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +def test_get_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.UpdateTargetSiteRequest, + dict, + ], +) +def test_update_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + request_init["target_site"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4", + "provided_uri_pattern": "provided_uri_pattern_value", + "type_": 1, + "exact_match": True, + "generated_uri_pattern": "generated_uri_pattern_value", + "site_verification_info": { + "site_verification_state": 1, + "verify_time": {"seconds": 751, "nanos": 543}, + }, + "indexing_status": 1, + "update_time": {}, + "failure_reason": {"quota_failure": {}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = site_search_engine_service.UpdateTargetSiteRequest.meta.fields[ + "target_site" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target_site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_site"][field])): + del request_init["target_site"][field][i][subfield] + else: + del 
request_init["target_site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_target_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_target_site_rest_required_fields( + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) 
+ + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("targetSite",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = 
SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_update_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_update_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.UpdateTargetSiteRequest.pb( + site_search_engine_service.UpdateTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.UpdateTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_target_site(request) + + +def test_update_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{target_site.name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_update_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +def test_update_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DeleteTargetSiteRequest, + dict, + ], +) +def test_delete_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_target_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_target_site_rest_required_fields( + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_delete_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.DeleteTargetSiteRequest.pb( + site_search_engine_service.DeleteTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.DeleteTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_target_site(request) + + +def test_delete_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +def test_delete_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.ListTargetSitesRequest, + dict, + ], +) +def test_list_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_target_sites(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_target_sites_rest_required_fields( + request_type=site_search_engine_service.ListTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_target_sites._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.ListTargetSitesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_list_target_sites" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_list_target_sites" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
site_search_engine_service.ListTargetSitesRequest.pb( + site_search_engine_service.ListTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + site_search_engine_service.ListTargetSitesResponse.to_json( + site_search_engine_service.ListTargetSitesResponse() + ) + ) + + request = site_search_engine_service.ListTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine_service.ListTargetSitesResponse() + + client.list_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.ListTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_target_sites(request) + + +def test_list_target_sites_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.ListTargetSitesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_target_sites(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites" + % client.transport._host, + args[1], + ) + + +def test_list_target_sites_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +def test_list_target_sites_rest_pager(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + site_search_engine_service.ListTargetSitesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + pager = client.list_target_sites(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + pages = list(client.list_target_sites(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + dict, + ], +) +def test_enable_advanced_site_search_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a 
request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_advanced_site_search(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_enable_advanced_site_search_rest_required_fields( + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_advanced_site_search._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_advanced_site_search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_advanced_site_search_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enable_advanced_site_search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("siteSearchEngine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_advanced_site_search_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_enable_advanced_site_search", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_enable_advanced_site_search", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( + site_search_engine_service.EnableAdvancedSiteSearchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_advanced_site_search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_advanced_site_search_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_advanced_site_search(request) + + +def test_enable_advanced_site_search_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + dict, + ], +) +def test_disable_advanced_site_search_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_advanced_site_search(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_disable_advanced_site_search_rest_required_fields( + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_advanced_site_search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_advanced_site_search_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_advanced_site_search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("siteSearchEngine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_advanced_site_search_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_disable_advanced_site_search", + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, + "pre_disable_advanced_site_search", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( + site_search_engine_service.DisableAdvancedSiteSearchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_advanced_site_search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_advanced_site_search_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_advanced_site_search(request) + + +def test_disable_advanced_site_search_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.RecrawlUrisRequest, + dict, + ], +) +def test_recrawl_uris_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.recrawl_uris(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_recrawl_uris_rest_required_fields( + request_type=site_search_engine_service.RecrawlUrisRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request_init["uris"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).recrawl_uris._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + jsonified_request["uris"] = "uris_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).recrawl_uris._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + assert "uris" in jsonified_request + assert jsonified_request["uris"] == "uris_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.recrawl_uris(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_recrawl_uris_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.recrawl_uris._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "siteSearchEngine", + "uris", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recrawl_uris_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.RecrawlUrisRequest.pb( + site_search_engine_service.RecrawlUrisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.RecrawlUrisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.recrawl_uris( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recrawl_uris_rest_bad_request( + transport: str = "rest", request_type=site_search_engine_service.RecrawlUrisRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recrawl_uris(request) + + +def test_recrawl_uris_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchVerifyTargetSitesRequest, + dict, + ], +) +def test_batch_verify_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_verify_target_sites(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_batch_verify_target_sites_rest_required_fields( + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_verify_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_verify_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + response = client.batch_verify_target_sites(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_recrawl_uris_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SiteSearchEngineServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + +def test_batch_verify_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: - client.recrawl_uris() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == site_search_engine_service.RecrawlUrisRequest() + unset_fields = transport.batch_verify_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -@pytest.mark.asyncio -async def test_recrawl_uris_async( - transport: str = "grpc_asyncio", - request_type=site_search_engine_service.RecrawlUrisRequest, -): - client = SiteSearchEngineServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_verify_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_verify_target_sites", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_batch_verify_target_sites", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( + site_search_engine_service.BatchVerifyTargetSitesRequest() ) - response = await client.recrawl_uris(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == site_search_engine_service.RecrawlUrisRequest() + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + request = site_search_engine_service.BatchVerifyTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + client.batch_verify_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_recrawl_uris_async_from_dict(): - await test_recrawl_uris_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_recrawl_uris_field_headers(): +def test_batch_verify_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = site_search_engine_service.RecrawlUrisRequest() - - request.site_search_engine = "site_search_engine_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.recrawl_uris(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "site_search_engine=site_search_engine_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_verify_target_sites(request) -@pytest.mark.asyncio -async def test_recrawl_uris_field_headers_async(): - client = SiteSearchEngineServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_batch_verify_target_sites_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = site_search_engine_service.RecrawlUrisRequest() - - request.site_search_engine = "site_search_engine_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.recrawl_uris(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "site_search_engine=site_search_engine_value", - ) in kw["metadata"] - @pytest.mark.parametrize( "request_type", [ - site_search_engine_service.RecrawlUrisRequest, + site_search_engine_service.FetchDomainVerificationStatusRequest, dict, ], ) -def test_recrawl_uris_rest(request_type): +def test_fetch_domain_verification_status_rest(request_type): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -920,36 +6622,46 @@ def test_recrawl_uris_rest(request_type): # send a request that will satisfy transcoding request_init = { - "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.recrawl_uris(request) + response = client.fetch_domain_verification_status(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.FetchDomainVerificationStatusPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 -def test_recrawl_uris_rest_required_fields( - request_type=site_search_engine_service.RecrawlUrisRequest, +def test_fetch_domain_verification_status_rest_required_fields( + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, ): transport_class = transports.SiteSearchEngineServiceRestTransport request_init = {} request_init["site_search_engine"] = "" - request_init["uris"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -964,24 +6676,28 @@ def test_recrawl_uris_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).recrawl_uris._get_unset_required_fields(jsonified_request) + ).fetch_domain_verification_status._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["siteSearchEngine"] = "site_search_engine_value" - jsonified_request["uris"] = "uris_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).recrawl_uris._get_unset_required_fields(jsonified_request) + ).fetch_domain_verification_status._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "siteSearchEngine" in jsonified_request assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" - assert "uris" in jsonified_request - assert jsonified_request["uris"] == "uris_value" client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -990,7 +6706,7 @@ def test_recrawl_uris_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = site_search_engine_service.FetchDomainVerificationStatusResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1002,45 +6718,53 @@ def test_recrawl_uris_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.recrawl_uris(request) + response = client.fetch_domain_verification_status(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_recrawl_uris_rest_unset_required_fields(): +def 
test_fetch_domain_verification_status_rest_unset_required_fields(): transport = transports.SiteSearchEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.recrawl_uris._get_unset_required_fields({}) + unset_fields = ( + transport.fetch_domain_verification_status._get_unset_required_fields({}) + ) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "siteSearchEngine", - "uris", + "pageSize", + "pageToken", ) ) + & set(("siteSearchEngine",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_recrawl_uris_rest_interceptors(null_interceptor): +def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): transport = transports.SiteSearchEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1053,16 +6777,16 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_domain_verification_status", ) as post, mock.patch.object( - transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" + transports.SiteSearchEngineServiceRestInterceptor, + "pre_fetch_domain_verification_status", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = site_search_engine_service.RecrawlUrisRequest.pb( - site_search_engine_service.RecrawlUrisRequest() + pb_message = site_search_engine_service.FetchDomainVerificationStatusRequest.pb( + site_search_engine_service.FetchDomainVerificationStatusRequest() ) transcode.return_value = { "method": "post", @@ -1074,19 +6798,23 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = 
PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.to_json( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) ) - request = site_search_engine_service.RecrawlUrisRequest() + request = site_search_engine_service.FetchDomainVerificationStatusRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) - client.recrawl_uris( + client.fetch_domain_verification_status( request, metadata=[ ("key", "val"), @@ -1098,8 +6826,9 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): post.assert_called_once() -def test_recrawl_uris_rest_bad_request( - transport: str = "rest", request_type=site_search_engine_service.RecrawlUrisRequest +def test_fetch_domain_verification_status_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, ): client = SiteSearchEngineServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1108,7 +6837,7 @@ def test_recrawl_uris_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" } request = request_type(**request_init) @@ -1121,14 +6850,76 @@ def test_recrawl_uris_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.recrawl_uris(request) + client.fetch_domain_verification_status(request) -def test_recrawl_uris_rest_error(): +def test_fetch_domain_verification_status_rest_pager(transport: str = 
"rest"): client = SiteSearchEngineServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + site_search_engine_service.FetchDomainVerificationStatusResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + + pager = client.fetch_domain_verification_status(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in 
results) + + pages = list( + client.fetch_domain_verification_status(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -1269,7 +7060,18 @@ def test_site_search_engine_service_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( + "get_site_search_engine", + "create_target_site", + "batch_create_target_sites", + "get_target_site", + "update_target_site", + "delete_target_site", + "list_target_sites", + "enable_advanced_site_search", + "disable_advanced_site_search", "recrawl_uris", + "batch_verify_target_sites", + "fetch_domain_verification_status", "get_operation", "list_operations", ) @@ -1556,9 +7358,42 @@ def test_site_search_engine_service_client_transport_session_collision(transport credentials=creds2, transport=transport_name, ) + session1 = client1.transport.get_site_search_engine._session + session2 = client2.transport.get_site_search_engine._session + assert session1 != session2 + session1 = client1.transport.create_target_site._session + session2 = client2.transport.create_target_site._session + assert session1 != session2 + session1 = client1.transport.batch_create_target_sites._session + session2 = client2.transport.batch_create_target_sites._session + assert session1 != session2 + session1 = client1.transport.get_target_site._session + session2 = client2.transport.get_target_site._session + assert session1 != session2 + session1 = client1.transport.update_target_site._session + session2 = client2.transport.update_target_site._session + assert session1 != session2 + session1 = client1.transport.delete_target_site._session + session2 = client2.transport.delete_target_site._session + assert session1 != session2 + session1 = client1.transport.list_target_sites._session + session2 = 
client2.transport.list_target_sites._session + assert session1 != session2 + session1 = client1.transport.enable_advanced_site_search._session + session2 = client2.transport.enable_advanced_site_search._session + assert session1 != session2 + session1 = client1.transport.disable_advanced_site_search._session + session2 = client2.transport.disable_advanced_site_search._session + assert session1 != session2 session1 = client1.transport.recrawl_uris._session session2 = client2.transport.recrawl_uris._session assert session1 != session2 + session1 = client1.transport.batch_verify_target_sites._session + session2 = client2.transport.batch_verify_target_sites._session + assert session1 != session2 + session1 = client1.transport.fetch_domain_verification_status._session + session2 = client2.transport.fetch_domain_verification_status._session + assert session1 != session2 def test_site_search_engine_service_grpc_transport_channel(): @@ -1749,8 +7584,39 @@ def test_parse_site_search_engine_path(): assert expected == actual +def test_target_site_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + target_site = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}".format( + project=project, + location=location, + data_store=data_store, + target_site=target_site, + ) + actual = SiteSearchEngineServiceClient.target_site_path( + project, location, data_store, target_site + ) + assert expected == actual + + +def test_parse_target_site_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "target_site": "clam", + } + path = SiteSearchEngineServiceClient.target_site_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_target_site_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1760,7 +7626,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = SiteSearchEngineServiceClient.common_billing_account_path(**expected) @@ -1770,7 +7636,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -1780,7 +7646,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = SiteSearchEngineServiceClient.common_folder_path(**expected) @@ -1790,7 +7656,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -1800,7 +7666,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = SiteSearchEngineServiceClient.common_organization_path(**expected) @@ -1810,7 +7676,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -1820,7 +7686,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = SiteSearchEngineServiceClient.common_project_path(**expected) @@ -1830,8 +7696,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = 
"octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -1842,8 +7708,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = SiteSearchEngineServiceClient.common_location_path(**expected) @@ -1900,7 +7766,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -1930,7 +7796,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -1963,7 +7829,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -1993,7 +7859,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index 63628f64d36e..6f39a4a65030 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -3247,7 +3247,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -3277,7 +3277,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3310,7 +3310,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -3340,7 +3340,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-documentai/CHANGELOG.md b/packages/google-cloud-documentai/CHANGELOG.md index 943cbff6ecd0..d0926dcdd086 100644 --- a/packages/google-cloud-documentai/CHANGELOG.md +++ b/packages/google-cloud-documentai/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.21.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.21.0...google-cloud-documentai-v2.21.1) (2023-12-13) + + +### Documentation + +* [google-cloud-documentai] Clarify Properties documentation ([#12118](https://github.com/googleapis/google-cloud-python/issues/12118)) ([35c180e](https://github.com/googleapis/google-cloud-python/commit/35c180e4cb7d9e52b5f72a13b606a6004b5c8a08)) + ## [2.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.20.2...google-cloud-documentai-v2.21.0) (2023-12-07) diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index 91772ebd624b..69ac2116cb35 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific 
language governing permissions and # limitations under the License. # -__version__ = "2.21.0" # {x-release-please-version} +__version__ = "2.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index 91772ebd624b..69ac2116cb35 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.0" # {x-release-please-version} +__version__ = "2.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_schema.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_schema.py index 7c39545bdcc2..4e43b4556fe6 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_schema.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_schema.py @@ -119,14 +119,14 @@ class Property(proto.Message): class OccurrenceType(proto.Enum): r"""Types of occurrences of the entity type in the document. This - represents the number of instances of instances of an entity, not - number of mentions of an entity. For example, a bank statement may - only have one ``account_number``, but this account number may be - mentioned in several places on the document. In this case the - 'account_number' would be considered a ``REQUIRED_ONCE`` entity - type. If, on the other hand, we expect a bank statement to contain - the status of multiple different accounts for the customers, the - occurrence type will be set to ``REQUIRED_MULTIPLE``. + represents the number of instances of an entity, not number of + mentions of an entity. 
For example, a bank statement may only have + one ``account_number``, but this account number may be mentioned in + several places on the document. In this case the 'account_number' + would be considered a ``REQUIRED_ONCE`` entity type. If, on the + other hand, we expect a bank statement to contain the status of + multiple different accounts for the customers, the occurrence type + will be set to ``REQUIRED_MULTIPLE``. Values: OCCURRENCE_TYPE_UNSPECIFIED (0): diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py index 91772ebd624b..69ac2116cb35 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.0" # {x-release-please-version} +__version__ = "2.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index 91772ebd624b..69ac2116cb35 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.0" # {x-release-please-version} +__version__ = "2.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 926da9fe457e..3ea560e105da 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.21.0" + "version": "2.21.1" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index 4a7b7653d7e7..a1a261bfb0bc 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.21.0" + "version": "2.21.1" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 61463ca4fe26..8e5787c9fac5 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", 
- "version": "2.21.0" + "version": "2.21.1" }, "snippets": [ { diff --git a/packages/google-cloud-edgenetwork/CHANGELOG.md b/packages/google-cloud-edgenetwork/CHANGELOG.md index 90506ab8acd3..c5a65e2edd55 100644 --- a/packages/google-cloud-edgenetwork/CHANGELOG.md +++ b/packages/google-cloud-edgenetwork/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.1...google-cloud-edgenetwork-v0.1.2) (2024-01-24) + + +### Features + +* [google-cloud-edgenetwork] add MACsec status for internal links ([#12213](https://github.com/googleapis/google-cloud-python/issues/12213)) ([313f567](https://github.com/googleapis/google-cloud-python/commit/313f5672c1d16681dd4db2c4a995c5668259ea7d)) + ## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.0...google-cloud-edgenetwork-v0.1.1) (2023-12-07) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py index 123d60293175..cf99f3acb1ee 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.1" # {x-release-please-version} +__version__ = "0.1.2" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py index 123d60293175..cf99f3acb1ee 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.1" # {x-release-please-version} +__version__ = "0.1.2" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/service.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/service.py index 9e4924cc9d4f..eba0e0837622 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/service.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/service.py @@ -1158,13 +1158,39 @@ class NetworkStatus(proto.Message): subnet_status (MutableSequence[google.cloud.edgenetwork_v1.types.SubnetStatus]): A list of status for the subnets under the current network. + macsec_status_internal_links (google.cloud.edgenetwork_v1.types.DiagnoseNetworkResponse.NetworkStatus.MacsecStatus): + The MACsec status of internal links. """ + class MacsecStatus(proto.Enum): + r"""Denotes the status of MACsec sessions for the links of a + zone. + + Values: + MACSEC_STATUS_UNSPECIFIED (0): + MACsec status not specified, likely due to + missing metrics. + SECURE (1): + All relevant links have at least one MACsec + session up. + UNSECURE (2): + At least one relevant link does not have any + MACsec sessions up. 
+ """ + MACSEC_STATUS_UNSPECIFIED = 0 + SECURE = 1 + UNSECURE = 2 + subnet_status: MutableSequence[resources.SubnetStatus] = proto.RepeatedField( proto.MESSAGE, number=1, message=resources.SubnetStatus, ) + macsec_status_internal_links: "DiagnoseNetworkResponse.NetworkStatus.MacsecStatus" = proto.Field( + proto.ENUM, + number=2, + enum="DiagnoseNetworkResponse.NetworkStatus.MacsecStatus", + ) update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-edgenetwork/noxfile.py b/packages/google-cloud-edgenetwork/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-edgenetwork/noxfile.py +++ b/packages/google-cloud-edgenetwork/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json index 495df8896361..8097c43ca4ef 100644 --- a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json +++ b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgenetwork", - "version": "0.1.1" + "version": "0.1.2" }, "snippets": [ { diff --git a/packages/google-cloud-functions/CHANGELOG.md b/packages/google-cloud-functions/CHANGELOG.md index 011080112b86..9b0564160b65 100644 --- a/packages/google-cloud-functions/CHANGELOG.md +++ b/packages/google-cloud-functions/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-functions-v1.14.0...google-cloud-functions-v1.15.0) (2024-01-24) + + +### Features + +* Add fields for automatic runtime updates ([82e676d](https://github.com/googleapis/google-cloud-python/commit/82e676dd9a49d54a88fe37c264b8b0145d2ca147)) +* Add optional parameter `version_id` to `GetFunctionRequest` ([82e676d](https://github.com/googleapis/google-cloud-python/commit/82e676dd9a49d54a88fe37c264b8b0145d2ca147)) + + +### Documentation + +* Deprecate `network` field of `CloudFunction` ([82e676d](https://github.com/googleapis/google-cloud-python/commit/82e676dd9a49d54a88fe37c264b8b0145d2ca147)) +* Minor updates in comments throughout ([82e676d](https://github.com/googleapis/google-cloud-python/commit/82e676dd9a49d54a88fe37c264b8b0145d2ca147)) +* Update 
description for docker_registry to reflect transition to Artifact Registry ([82e676d](https://github.com/googleapis/google-cloud-python/commit/82e676dd9a49d54a88fe37c264b8b0145d2ca147)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-functions-v1.13.3...google-cloud-functions-v1.14.0) (2023-12-07) diff --git a/packages/google-cloud-functions/google/cloud/functions/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions/gapic_version.py index 7e53c40f61cc..ae06408a02c4 100644 --- a/packages/google-cloud-functions/google/cloud/functions/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py index 7e53c40f61cc..ae06408a02c4 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py index b7901c846e03..181e648e8a45 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/async_client.py @@ -398,7 +398,7 @@ async def sample_get_function(): google.cloud.functions_v1.types.CloudFunction: Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates + response to an event. It encapsulate function and triggers configurations. """ @@ -466,7 +466,7 @@ async def create_function( ) -> operation_async.AsyncOperation: r"""Creates a new function. If a function with the given name already exists in the specified project, the long running - operation returns an ``ALREADY_EXISTS`` error. + operation will return ``ALREADY_EXISTS`` error. .. code-block:: python @@ -529,7 +529,7 @@ async def sample_create_function(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates function and + response to an event. It encapsulate function and triggers configurations. """ @@ -650,7 +650,7 @@ async def sample_update_function(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates function and + response to an event. 
It encapsulate function and triggers configurations. """ @@ -727,7 +727,8 @@ async def delete_function( ) -> operation_async.AsyncOperation: r"""Deletes a function with the given name from the specified project. If the given function is used by some - trigger, the trigger is updated to remove this function. + trigger, the trigger will be updated to remove this + function. .. code-block:: python @@ -991,13 +992,13 @@ async def generate_upload_url( credentials would be used, but that identity does not have permissions to upload files to the URL. - When making an HTTP PUT request, these two headers must be + When making a HTTP PUT request, these two headers need to be specified: - ``content-type: application/zip`` - ``x-goog-content-length-range: 0,104857600`` - And this header must NOT be specified: + And this header SHOULD NOT be specified: - ``Authorization: Bearer YOUR_TOKEN`` @@ -1077,9 +1078,9 @@ async def generate_download_url( ) -> functions.GenerateDownloadUrlResponse: r"""Returns a signed URL for downloading deployed function source code. The URL is only valid for a - limited period and must be used within minutes after + limited period and should be used within minutes after generation. - For more information about the signed URL usage, see: + For more information about the signed URL usage see: https://cloud.google.com/storage/docs/access-control/signed-urls @@ -1383,8 +1384,8 @@ async def test_iam_permissions( metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this - returns an empty set of permissions, not a NOT_FOUND error. + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. .. 
code-block:: python diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py index 2238ae764c7b..e0e17d67f39d 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py @@ -653,7 +653,7 @@ def sample_get_function(): google.cloud.functions_v1.types.CloudFunction: Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates + response to an event. It encapsulate function and triggers configurations. """ @@ -711,7 +711,7 @@ def create_function( ) -> operation.Operation: r"""Creates a new function. If a function with the given name already exists in the specified project, the long running - operation returns an ``ALREADY_EXISTS`` error. + operation will return ``ALREADY_EXISTS`` error. .. code-block:: python @@ -774,7 +774,7 @@ def sample_create_function(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates function and + response to an event. It encapsulate function and triggers configurations. """ @@ -895,7 +895,7 @@ def sample_update_function(): An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates function and + response to an event. It encapsulate function and triggers configurations. 
""" @@ -962,7 +962,8 @@ def delete_function( ) -> operation.Operation: r"""Deletes a function with the given name from the specified project. If the given function is used by some - trigger, the trigger is updated to remove this function. + trigger, the trigger will be updated to remove this + function. .. code-block:: python @@ -1216,13 +1217,13 @@ def generate_upload_url( credentials would be used, but that identity does not have permissions to upload files to the URL. - When making an HTTP PUT request, these two headers must be + When making a HTTP PUT request, these two headers need to be specified: - ``content-type: application/zip`` - ``x-goog-content-length-range: 0,104857600`` - And this header must NOT be specified: + And this header SHOULD NOT be specified: - ``Authorization: Bearer YOUR_TOKEN`` @@ -1303,9 +1304,9 @@ def generate_download_url( ) -> functions.GenerateDownloadUrlResponse: r"""Returns a signed URL for downloading deployed function source code. The URL is only valid for a - limited period and must be used within minutes after + limited period and should be used within minutes after generation. - For more information about the signed URL usage, see: + For more information about the signed URL usage see: https://cloud.google.com/storage/docs/access-control/signed-urls @@ -1608,8 +1609,8 @@ def test_iam_permissions( metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this - returns an empty set of permissions, not a NOT_FOUND error. + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. .. 
code-block:: python diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py index 1bb29cf8359f..dcffa3341a74 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py @@ -310,7 +310,7 @@ def create_function( Creates a new function. If a function with the given name already exists in the specified project, the long running - operation returns an ``ALREADY_EXISTS`` error. + operation will return ``ALREADY_EXISTS`` error. Returns: Callable[[~.CreateFunctionRequest], @@ -364,7 +364,8 @@ def delete_function( Deletes a function with the given name from the specified project. If the given function is used by some - trigger, the trigger is updated to remove this function. + trigger, the trigger will be updated to remove this + function. Returns: Callable[[~.DeleteFunctionRequest], @@ -440,13 +441,13 @@ def generate_upload_url( credentials would be used, but that identity does not have permissions to upload files to the URL. - When making an HTTP PUT request, these two headers must be + When making a HTTP PUT request, these two headers need to be specified: - ``content-type: application/zip`` - ``x-goog-content-length-range: 0,104857600`` - And this header must NOT be specified: + And this header SHOULD NOT be specified: - ``Authorization: Bearer YOUR_TOKEN`` @@ -478,9 +479,9 @@ def generate_download_url( Returns a signed URL for downloading deployed function source code. The URL is only valid for a - limited period and must be used within minutes after + limited period and should be used within minutes after generation. 
- For more information about the signed URL usage, see: + For more information about the signed URL usage see: https://cloud.google.com/storage/docs/access-control/signed-urls @@ -567,8 +568,8 @@ def test_iam_permissions( r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this - returns an empty set of permissions, not a NOT_FOUND error. + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. Returns: Callable[[~.TestIamPermissionsRequest], diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py index 858fe79bc5a3..785aba359d86 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py @@ -319,7 +319,7 @@ def create_function( Creates a new function. If a function with the given name already exists in the specified project, the long running - operation returns an ``ALREADY_EXISTS`` error. + operation will return ``ALREADY_EXISTS`` error. Returns: Callable[[~.CreateFunctionRequest], @@ -377,7 +377,8 @@ def delete_function( Deletes a function with the given name from the specified project. If the given function is used by some - trigger, the trigger is updated to remove this function. + trigger, the trigger will be updated to remove this + function. Returns: Callable[[~.DeleteFunctionRequest], @@ -456,13 +457,13 @@ def generate_upload_url( credentials would be used, but that identity does not have permissions to upload files to the URL. 
- When making an HTTP PUT request, these two headers must be + When making a HTTP PUT request, these two headers need to be specified: - ``content-type: application/zip`` - ``x-goog-content-length-range: 0,104857600`` - And this header must NOT be specified: + And this header SHOULD NOT be specified: - ``Authorization: Bearer YOUR_TOKEN`` @@ -495,9 +496,9 @@ def generate_download_url( Returns a signed URL for downloading deployed function source code. The URL is only valid for a - limited period and must be used within minutes after + limited period and should be used within minutes after generation. - For more information about the signed URL usage, see: + For more information about the signed URL usage see: https://cloud.google.com/storage/docs/access-control/signed-urls @@ -584,8 +585,8 @@ def test_iam_permissions( r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this - returns an empty set of permissions, not a NOT_FOUND error. + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. Returns: Callable[[~.TestIamPermissionsRequest], diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py index d9c9cf8ccf53..9d5a77ba1a78 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py @@ -1112,7 +1112,7 @@ def __call__( ~.functions.CloudFunction: Describes a Cloud Function that contains user computation executed in - response to an event. It encapsulates + response to an event. 
It encapsulate function and triggers configurations. """ diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/types/functions.py b/packages/google-cloud-functions/google/cloud/functions_v1/types/functions.py index e640a9385378..68868c345599 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/types/functions.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/types/functions.py @@ -81,7 +81,7 @@ class CloudFunctionStatus(proto.Enum): class CloudFunction(proto.Message): r"""Describes a Cloud Function that contains user computation - executed in response to an event. It encapsulates function and + executed in response to an event. It encapsulate function and triggers configurations. This message has `oneof`_ fields (mutually exclusive fields). @@ -110,7 +110,7 @@ class CloudFunction(proto.Message): This field is a member of `oneof`_ ``source_code``. source_upload_url (str): - The Google Cloud Storage-signed URL used for source + The Google Cloud Storage signed URL used for source uploading, generated by calling [google.cloud.functions.v1.GenerateUploadUrl]. @@ -133,13 +133,10 @@ class CloudFunction(proto.Message): Output only. Status of the function deployment. entry_point (str): - The name of the function (as defined in source code) that is - executed. Defaults to the resource name suffix, if not - specified. For backward compatibility, if function with - given name is not found, the system tries to use the - function named "function". For Node.js, this is the name of - a function exported by the module as specified in - ``source_location``. + The name of the function (as defined in + source code) that will be executed. Defaults to + the resource name suffix (ID of the function), + if not specified. runtime (str): The runtime in which to run the function. 
Required when deploying a new function, optional when updating an existing @@ -173,32 +170,16 @@ class CloudFunction(proto.Message): Build environment variables that shall be available during build time. network (str): - The Serverless VPC Access connector that this cloud function - can connect to. It can be either the fully qualified URI, or - the short name of the connector resource. If the connector - name is used, the connector must belong to the same project - as the function. Otherwise, it must belong to a project - within the same organization. The format of this field is - either ``projects/{project}/global/networks/{network}`` or - ``{network}``, where ``{project}`` is a project id where the - network is defined, and ``{network}`` is the short name of - the network. - - This field is mutually exclusive with ``vpc_connector`` and - will be replaced by it. - - See `the VPC - documentation `__ - for more information on connecting Cloud projects. + Deprecated: use vpc_connector max_instances (int): The limit on the maximum number of function instances that - can coexist at a given time. + may coexist at a given time. In some cases, such as rapid traffic surges, Cloud Functions - can for a short period of time create more instances than + may, for a short period of time, create more instances than the specified max instances limit. If your function cannot - tolerate this temporary behavior, you might want to factor - in a safety margin and set a lower max instances value than + tolerate this temporary behavior, you may want to factor in + a safety margin and set a lower max instances value than your function can tolerate. See the `Max @@ -206,10 +187,10 @@ class CloudFunction(proto.Message): Guide for more details. min_instances (int): A lower bound for the number function - instances that can coexist at a given time. + instances that may coexist at a given time. vpc_connector (str): The VPC Network Connector that this cloud function can - connect to. 
It can be either the fully qualified URI, or the + connect to. It can be either the fully-qualified URI, or the short name of the network connector resource. The format of this field is ``projects/*/locations/*/connectors/*`` @@ -292,7 +273,7 @@ class CloudFunction(proto.Message): unspecified and the deployment is eligible to use Artifact Registry, GCF will create and use a repository named 'gcf-artifacts' for every deployed region. This is the - repository to which the function docker image is pushed + repository to which the function docker image will be pushed after it is built by Cloud Build. It must match the pattern @@ -304,28 +285,36 @@ class CloudFunction(proto.Message): docker_registry (google.cloud.functions_v1.types.CloudFunction.DockerRegistry): Docker Registry to use for this deployment. - If ``docker_repository`` field is specified, this field is - automatically set as ``ARTIFACT_REGISTRY``. If unspecified, - it currently defaults to ``CONTAINER_REGISTRY``. This field - may be overridden by the backend for eligible deployments. + If unspecified, it defaults to ``ARTIFACT_REGISTRY``. If + ``docker_repository`` field is specified, this field should + either be left unspecified or set to ``ARTIFACT_REGISTRY``. + automatic_update_policy (google.cloud.functions_v1.types.CloudFunction.AutomaticUpdatePolicy): + See the comment next to this message for more + details. + + This field is a member of `oneof`_ ``runtime_update_policy``. + on_deploy_update_policy (google.cloud.functions_v1.types.CloudFunction.OnDeployUpdatePolicy): + See the comment next to this message for more + details. + + This field is a member of `oneof`_ ``runtime_update_policy``. """ class VpcConnectorEgressSettings(proto.Enum): r"""Available egress settings. - This controls what traffic is diverted through the Serverless VPC - Access connector resource. By default, PRIVATE_RANGES_ONLY is used. + This controls what traffic is diverted through the VPC Access + Connector resource. 
By default PRIVATE_RANGES_ONLY will be used. Values: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED (0): Unspecified. PRIVATE_RANGES_ONLY (1): - Use the Serverless VPC Access connector only - for private IP space from RFC1918. + Use the VPC Access Connector only for private + IP space from RFC1918. ALL_TRAFFIC (2): - Force the use of Serverless VPC Access - connector for all egress traffic from the - function. + Force the use of VPC Access Connector for all + egress traffic from the function. """ VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED = 0 PRIVATE_RANGES_ONLY = 1 @@ -336,7 +325,7 @@ class IngressSettings(proto.Enum): This controls what traffic can reach the function. - If unspecified, ALLOW_ALL is used. + If unspecified, ALLOW_ALL will be used. Values: INGRESS_SETTINGS_UNSPECIFIED (0): @@ -363,20 +352,42 @@ class DockerRegistry(proto.Enum): DOCKER_REGISTRY_UNSPECIFIED (0): Unspecified. CONTAINER_REGISTRY (1): - Docker images are stored in multi-regional Container + Docker images will be stored in multi-regional Container Registry repositories named ``gcf``. ARTIFACT_REGISTRY (2): - Docker images are stored in regional Artifact Registry - repositories. By default, Cloud Functions creates and uses + Docker images will be stored in regional Artifact Registry + repositories. By default, GCF will create and use repositories named ``gcf-artifacts`` in every region in which a function is deployed. But the repository to use can - also be specified by the user by using the + also be specified by the user using the ``docker_repository`` field. """ DOCKER_REGISTRY_UNSPECIFIED = 0 CONTAINER_REGISTRY = 1 ARTIFACT_REGISTRY = 2 + class AutomaticUpdatePolicy(proto.Message): + r"""Security patches are applied automatically to the runtime + without requiring the function to be redeployed. + + """ + + class OnDeployUpdatePolicy(proto.Message): + r"""Security patches are only applied when a function is + redeployed. + + Attributes: + runtime_version (str): + Output only. 
contains the runtime version + which was used during latest function + deployment. + """ + + runtime_version: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -528,6 +539,18 @@ class DockerRegistry(proto.Enum): number=35, enum=DockerRegistry, ) + automatic_update_policy: AutomaticUpdatePolicy = proto.Field( + proto.MESSAGE, + number=40, + oneof="runtime_update_policy", + message=AutomaticUpdatePolicy, + ) + on_deploy_update_policy: OnDeployUpdatePolicy = proto.Field( + proto.MESSAGE, + number=41, + oneof="runtime_update_policy", + message=OnDeployUpdatePolicy, + ) class SourceRepository(proto.Message): @@ -548,7 +571,7 @@ class SourceRepository(proto.Message): alias. To refer to a specific fixed alias (tag): ``https://source.developers.google.com/projects/*/repos/*/fixed-aliases/*/paths/*`` - You can omit ``paths/*`` if you want to use the main + You may omit ``paths/*`` if you want to use the main directory. deployed_url (str): Output only. The URL pointing to the hosted @@ -580,11 +603,11 @@ class HttpsTrigger(proto.Message): """ class SecurityLevel(proto.Enum): - r"""Available security-level settings. + r"""Available security level settings. This controls the methods to enforce security (HTTPS) on a URL. - If unspecified, SECURE_OPTIONAL is used. + If unspecified, SECURE_OPTIONAL will be used. Values: SECURITY_LEVEL_UNSPECIFIED (0): @@ -616,8 +639,8 @@ class SecurityLevel(proto.Enum): class EventTrigger(proto.Message): - r"""Describes EventTrigger, used to request that events be sent - from another service. + r"""Describes EventTrigger, used to request events be sent from + another service. Attributes: event_type (str): @@ -635,7 +658,7 @@ class EventTrigger(proto.Message): includes the type ``object``. 3. action: The action that generates the event. For example, action for a Google Cloud Storage Object is 'change'. - These parts are lowercase. + These parts are lower case. resource (str): Required. 
The resource(s) from which to observe events, for example, ``projects/_/buckets/myBucket``. @@ -653,8 +676,8 @@ class EventTrigger(proto.Message): have a resource that matches Google Cloud Pub/Sub topics. Additionally, some services may support short names when - creating an ``EventTrigger``. These are always returned in - the normalized "long" format. + creating an ``EventTrigger``. These will always be returned + in the normalized "long" format. See each *service's* documentation for supported formats. service (str): @@ -689,7 +712,7 @@ class EventTrigger(proto.Message): class FailurePolicy(proto.Message): r"""Describes the policy in case of function's execution failure. - If empty, then defaults to ignoring failures (i.e., not retrying + If empty, then defaults to ignoring failures (i.e. not retrying them). @@ -697,17 +720,17 @@ class FailurePolicy(proto.Message): Attributes: retry (google.cloud.functions_v1.types.FailurePolicy.Retry): - If specified, the function is retried in case - of a failure. + If specified, then the function will be + retried in case of a failure. This field is a member of `oneof`_ ``action``. """ class Retry(proto.Message): r"""Describes the retry policy in case of function's execution - failure. A function execution is retried on any failure. A - failed execution is retried up to 7 days with an exponential - backoff (capped at 10 seconds). + failure. A function execution will be retried on any failure. A + failed execution will be retried up to 7 days with an + exponential backoff (capped at 10 seconds). Retried execution is charged as any other execution. """ @@ -722,8 +745,8 @@ class Retry(proto.Message): class SecretEnvVar(proto.Message): r"""Configuration for a secret environment variable. It has the - information necessary to fetch the secret value from Secret - Manager and expose it as an environment variable. + information necessary to fetch the secret value from secret + manager and expose it as an environment variable. 
Attributes: key (str): @@ -732,11 +755,11 @@ class SecretEnvVar(proto.Message): Project identifier (preferrably project number but can also be the project ID) of the project that contains the secret. If not set, it - is populated with the function's project, + will be populated with the function's project assuming that the secret exists in the same - project as the function. + project as of the function. secret (str): - Name of the secret in Secret Manager (not the + Name of the secret in secret manager (not the full resource name). version (str): Version of the secret (version number or the @@ -766,19 +789,20 @@ class SecretEnvVar(proto.Message): class SecretVolume(proto.Message): r"""Configuration for a secret volume. It has the information - necessary to fetch the secret value from Secret Manager and make + necessary to fetch the secret value from secret manager and make it available as files mounted at the requested paths within the application container. Secret value is not a part of the - configuration. Every file system read operation performs a - lookup in Secret Manager to retrieve the secret value. + configuration. Every filesystem read operation performs a lookup + in secret manager to retrieve the secret value. Attributes: mount_path (str): The path within the container to mount the secret volume. For example, setting the mount_path as ``/etc/secrets`` - mounts the secret value files under the ``/etc/secrets`` - directory. This directory is also completely shadowed and - unavailable to mount any other secrets. + would mount the secret value files under the + ``/etc/secrets`` directory. This directory will also be + completely shadowed and unavailable to mount any other + secrets. 
Recommended mount paths: /etc/secrets Restricted mount paths: /cloudsql, /dev/log, /pod, /proc, /var/log @@ -786,16 +810,16 @@ class SecretVolume(proto.Message): Project identifier (preferrably project number but can also be the project ID) of the project that contains the secret. If not set, it - is populated with the function's project, + will be populated with the function's project assuming that the secret exists in the same - project as the function. + project as of the function. secret (str): - Name of the secret in Secret Manager (not the + Name of the secret in secret manager (not the full resource name). versions (MutableSequence[google.cloud.functions_v1.types.SecretVolume.SecretVersion]): List of secret versions to mount for this secret. If empty, - the ``latest`` version of the secret is made available in a - file named after the secret under the mount point. + the ``latest`` version of the secret will be made available + in a file named after the secret under the mount point. """ class SecretVersion(proto.Message): @@ -809,10 +833,10 @@ class SecretVersion(proto.Message): immediately. path (str): Relative path of the file under the mount path where the - secret value for this version is fetched and made available. - For example, setting the mount_path as '/etc/secrets' and - path as ``/secret_foo`` mounts the secret value file at - ``/etc/secrets/secret_foo``. + secret value for this version will be fetched and made + available. For example, setting the mount_path as + '/etc/secrets' and path as ``/secret_foo`` would mount the + secret value file at ``/etc/secrets/secret_foo``. """ version: str = proto.Field( @@ -896,12 +920,27 @@ class GetFunctionRequest(proto.Message): name (str): Required. The name of the function which details should be obtained. + version_id (int): + Optional. The optional version of the + function whose details should be obtained. 
The + version of a 1st Gen function is an integer that + starts from 1 and gets incremented on + redeployments. Each deployment creates a config + version of the underlying function. GCF may keep + historical configs for old versions. This field + can be specified to fetch the historical + configs. Leave it blank or set to 0 to get the + latest version of the function. """ name: str = proto.Field( proto.STRING, number=1, ) + version_id: int = proto.Field( + proto.INT64, + number=2, + ) class ListFunctionsRequest(proto.Message): @@ -1052,13 +1091,12 @@ class GenerateUploadUrlRequest(proto.Message): ``projects/*/locations/*``. kms_key_name (str): Resource name of a KMS crypto key (managed by the user) used - to encrypt/decrypt function source code objects in staging - Cloud Storage buckets. When you generate an upload url and - upload your source code, it gets copied to a staging Cloud - Storage bucket in an internal regional project. The source - code is then copied to a versioned directory in the sources - bucket in the consumer project during the function - deployment. + to encrypt/decrypt function source code objects in + intermediate Cloud Storage buckets. When you generate an + upload url and upload your source code, it gets copied to an + intermediate Cloud Storage bucket. The source code is then + copied to a versioned directory in the sources bucket in the + consumer project during the function deployment. It must match the pattern ``projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}``. 
diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py index 7e53c40f61cc..ae06408a02c4 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/noxfile.py b/packages/google-cloud-functions/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-functions/noxfile.py +++ b/packages/google-cloud-functions/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json index 9c54f38a0e1e..1cf26eb30175 100644 --- a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json +++ b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-functions", - "version": "1.14.0" + "version": "1.15.0" }, "snippets": [ { diff --git a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json index 9afd09368259..36d43d148ffa 100644 --- a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json +++ b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-functions", - "version": "1.14.0" + "version": "1.15.0" }, "snippets": [ { diff --git a/packages/google-cloud-functions/scripts/fixup_functions_v1_keywords.py b/packages/google-cloud-functions/scripts/fixup_functions_v1_keywords.py index 5e2a477bdfac..23d1b3f0b7c7 100644 --- a/packages/google-cloud-functions/scripts/fixup_functions_v1_keywords.py +++ b/packages/google-cloud-functions/scripts/fixup_functions_v1_keywords.py @@ -44,7 +44,7 @@ class functionsCallTransformer(cst.CSTTransformer): 'delete_function': ('name', ), 'generate_download_url': ('name', 'version_id', ), 
'generate_upload_url': ('parent', 'kms_key_name', ), - 'get_function': ('name', ), + 'get_function': ('name', 'version_id', ), 'get_iam_policy': ('resource', 'options', ), 'list_functions': ('parent', 'page_size', 'page_token', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), diff --git a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py index b8e93c9df74a..87de2216f376 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py @@ -3517,6 +3517,8 @@ def test_get_function_rest_required_fields(request_type=functions.GetFunctionReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_function._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3570,7 +3572,7 @@ def test_get_function_rest_unset_required_fields(): ) unset_fields = transport.get_function._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("versionId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3784,6 +3786,8 @@ def test_create_function_rest(request_type): "source_token": "source_token_value", "docker_repository": "docker_repository_value", "docker_registry": 1, + "automatic_update_policy": {}, + "on_deploy_update_policy": {"runtime_version": "runtime_version_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -4181,6 +4185,8 @@ def test_update_function_rest(request_type): "source_token": "source_token_value", "docker_repository": "docker_repository_value", "docker_registry": 1, + "automatic_update_policy": {}, + "on_deploy_update_policy": {"runtime_version": "runtime_version_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-gke-multicloud/CHANGELOG.md b/packages/google-cloud-gke-multicloud/CHANGELOG.md index 6a22186ba9db..446993bf4eb9 100644 --- a/packages/google-cloud-gke-multicloud/CHANGELOG.md +++ b/packages/google-cloud-gke-multicloud/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.6.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.4...google-cloud-gke-multicloud-v0.6.5) (2024-01-04) + + +### Features + +* added Binary Authorization support which is a deploy-time security control that ensures only trusted container images are deployed ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* added force-deletion support for AWS Clusters & Node Pools ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* added proxy support for Attached Clusters ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* added support for a new admin-groups flag in the create and update APIs ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* added support for EC2 Spot instance types for AWS Node Pools ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* added support for per-node-pool subnet 
security group rules for AWS Node Pools ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* added Surge Update and Rollback support for AWS Node Pools ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) +* expanded Kubernetes version info ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) + + +### Documentation + +* updated comments of existing fields ([30f5d0e](https://github.com/googleapis/google-cloud-python/commit/30f5d0ef8ee52c3a30f1cdd166f69d76c0a3366a)) + ## [0.6.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.3...google-cloud-gke-multicloud-v0.6.4) (2023-12-07) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py index 4550547113c5..a05d87eef42d 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py @@ -39,15 +39,20 @@ from google.cloud.gke_multicloud_v1.types.attached_resources import ( AttachedCluster, AttachedClusterError, + AttachedClusterGroup, AttachedClustersAuthorization, AttachedClusterUser, AttachedOidcConfig, AttachedPlatformVersionInfo, + AttachedProxyConfig, AttachedServerConfig, + KubernetesSecret, ) from google.cloud.gke_multicloud_v1.types.attached_service import ( CreateAttachedClusterRequest, DeleteAttachedClusterRequest, + GenerateAttachedClusterAgentTokenRequest, + GenerateAttachedClusterAgentTokenResponse, GenerateAttachedClusterInstallManifestRequest, GenerateAttachedClusterInstallManifestResponse, GetAttachedClusterRequest, @@ -62,22 +67,29 @@ AwsAutoscalingGroupMetricsCollection, AwsCluster, AwsClusterError, + AwsClusterGroup, AwsClusterNetworking, AwsClusterUser, 
AwsConfigEncryption, AwsControlPlane, AwsDatabaseEncryption, AwsInstancePlacement, + AwsJsonWebKeys, AwsK8sVersionInfo, AwsNodeConfig, + AwsNodeManagement, AwsNodePool, AwsNodePoolAutoscaling, AwsNodePoolError, + AwsOpenIdConfig, AwsProxyConfig, AwsServerConfig, AwsServicesAuthentication, AwsSshConfig, AwsVolumeTemplate, + SpotConfig, + SurgeSettings, + UpdateSettings, ) from google.cloud.gke_multicloud_v1.types.aws_service import ( CreateAwsClusterRequest, @@ -86,13 +98,18 @@ DeleteAwsNodePoolRequest, GenerateAwsAccessTokenRequest, GenerateAwsAccessTokenResponse, + GenerateAwsClusterAgentTokenRequest, + GenerateAwsClusterAgentTokenResponse, GetAwsClusterRequest, + GetAwsJsonWebKeysRequest, GetAwsNodePoolRequest, + GetAwsOpenIdConfigRequest, GetAwsServerConfigRequest, ListAwsClustersRequest, ListAwsClustersResponse, ListAwsNodePoolsRequest, ListAwsNodePoolsResponse, + RollbackAwsNodePoolUpdateRequest, UpdateAwsClusterRequest, UpdateAwsNodePoolRequest, ) @@ -101,6 +118,7 @@ AzureClient, AzureCluster, AzureClusterError, + AzureClusterGroup, AzureClusterNetworking, AzureClusterResources, AzureClusterUser, @@ -108,11 +126,14 @@ AzureControlPlane, AzureDatabaseEncryption, AzureDiskTemplate, + AzureJsonWebKeys, AzureK8sVersionInfo, AzureNodeConfig, + AzureNodeManagement, AzureNodePool, AzureNodePoolAutoscaling, AzureNodePoolError, + AzureOpenIdConfig, AzureProxyConfig, AzureServerConfig, AzureServicesAuthentication, @@ -128,9 +149,13 @@ DeleteAzureNodePoolRequest, GenerateAzureAccessTokenRequest, GenerateAzureAccessTokenResponse, + GenerateAzureClusterAgentTokenRequest, + GenerateAzureClusterAgentTokenResponse, GetAzureClientRequest, GetAzureClusterRequest, + GetAzureJsonWebKeysRequest, GetAzureNodePoolRequest, + GetAzureOpenIdConfigRequest, GetAzureServerConfigRequest, ListAzureClientsRequest, ListAzureClientsResponse, @@ -142,7 +167,9 @@ UpdateAzureNodePoolRequest, ) from google.cloud.gke_multicloud_v1.types.common_resources import ( + BinaryAuthorization, Fleet, + 
Jwk, LoggingComponentConfig, LoggingConfig, ManagedPrometheusConfig, @@ -162,13 +189,18 @@ "AzureClustersAsyncClient", "AttachedCluster", "AttachedClusterError", + "AttachedClusterGroup", "AttachedClustersAuthorization", "AttachedClusterUser", "AttachedOidcConfig", "AttachedPlatformVersionInfo", + "AttachedProxyConfig", "AttachedServerConfig", + "KubernetesSecret", "CreateAttachedClusterRequest", "DeleteAttachedClusterRequest", + "GenerateAttachedClusterAgentTokenRequest", + "GenerateAttachedClusterAgentTokenResponse", "GenerateAttachedClusterInstallManifestRequest", "GenerateAttachedClusterInstallManifestResponse", "GetAttachedClusterRequest", @@ -181,41 +213,54 @@ "AwsAutoscalingGroupMetricsCollection", "AwsCluster", "AwsClusterError", + "AwsClusterGroup", "AwsClusterNetworking", "AwsClusterUser", "AwsConfigEncryption", "AwsControlPlane", "AwsDatabaseEncryption", "AwsInstancePlacement", + "AwsJsonWebKeys", "AwsK8sVersionInfo", "AwsNodeConfig", + "AwsNodeManagement", "AwsNodePool", "AwsNodePoolAutoscaling", "AwsNodePoolError", + "AwsOpenIdConfig", "AwsProxyConfig", "AwsServerConfig", "AwsServicesAuthentication", "AwsSshConfig", "AwsVolumeTemplate", + "SpotConfig", + "SurgeSettings", + "UpdateSettings", "CreateAwsClusterRequest", "CreateAwsNodePoolRequest", "DeleteAwsClusterRequest", "DeleteAwsNodePoolRequest", "GenerateAwsAccessTokenRequest", "GenerateAwsAccessTokenResponse", + "GenerateAwsClusterAgentTokenRequest", + "GenerateAwsClusterAgentTokenResponse", "GetAwsClusterRequest", + "GetAwsJsonWebKeysRequest", "GetAwsNodePoolRequest", + "GetAwsOpenIdConfigRequest", "GetAwsServerConfigRequest", "ListAwsClustersRequest", "ListAwsClustersResponse", "ListAwsNodePoolsRequest", "ListAwsNodePoolsResponse", + "RollbackAwsNodePoolUpdateRequest", "UpdateAwsClusterRequest", "UpdateAwsNodePoolRequest", "AzureAuthorization", "AzureClient", "AzureCluster", "AzureClusterError", + "AzureClusterGroup", "AzureClusterNetworking", "AzureClusterResources", "AzureClusterUser", @@ 
-223,11 +268,14 @@ "AzureControlPlane", "AzureDatabaseEncryption", "AzureDiskTemplate", + "AzureJsonWebKeys", "AzureK8sVersionInfo", "AzureNodeConfig", + "AzureNodeManagement", "AzureNodePool", "AzureNodePoolAutoscaling", "AzureNodePoolError", + "AzureOpenIdConfig", "AzureProxyConfig", "AzureServerConfig", "AzureServicesAuthentication", @@ -241,9 +289,13 @@ "DeleteAzureNodePoolRequest", "GenerateAzureAccessTokenRequest", "GenerateAzureAccessTokenResponse", + "GenerateAzureClusterAgentTokenRequest", + "GenerateAzureClusterAgentTokenResponse", "GetAzureClientRequest", "GetAzureClusterRequest", + "GetAzureJsonWebKeysRequest", "GetAzureNodePoolRequest", + "GetAzureOpenIdConfigRequest", "GetAzureServerConfigRequest", "ListAzureClientsRequest", "ListAzureClientsResponse", @@ -253,7 +305,9 @@ "ListAzureNodePoolsResponse", "UpdateAzureClusterRequest", "UpdateAzureNodePoolRequest", + "BinaryAuthorization", "Fleet", + "Jwk", "LoggingComponentConfig", "LoggingConfig", "ManagedPrometheusConfig", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py index 76bb756dc95d..7257802240c5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.4" # {x-release-please-version} +__version__ = "0.6.5" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py index 7d0159af5ae3..788ad18e67a2 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py @@ -27,15 +27,20 @@ from .types.attached_resources import ( AttachedCluster, AttachedClusterError, + AttachedClusterGroup, AttachedClustersAuthorization, AttachedClusterUser, AttachedOidcConfig, AttachedPlatformVersionInfo, + AttachedProxyConfig, AttachedServerConfig, + KubernetesSecret, ) from .types.attached_service import ( CreateAttachedClusterRequest, DeleteAttachedClusterRequest, + GenerateAttachedClusterAgentTokenRequest, + GenerateAttachedClusterAgentTokenResponse, GenerateAttachedClusterInstallManifestRequest, GenerateAttachedClusterInstallManifestResponse, GetAttachedClusterRequest, @@ -50,22 +55,29 @@ AwsAutoscalingGroupMetricsCollection, AwsCluster, AwsClusterError, + AwsClusterGroup, AwsClusterNetworking, AwsClusterUser, AwsConfigEncryption, AwsControlPlane, AwsDatabaseEncryption, AwsInstancePlacement, + AwsJsonWebKeys, AwsK8sVersionInfo, AwsNodeConfig, + AwsNodeManagement, AwsNodePool, AwsNodePoolAutoscaling, AwsNodePoolError, + AwsOpenIdConfig, AwsProxyConfig, AwsServerConfig, AwsServicesAuthentication, AwsSshConfig, AwsVolumeTemplate, + SpotConfig, + SurgeSettings, + UpdateSettings, ) from .types.aws_service import ( CreateAwsClusterRequest, @@ -74,13 +86,18 @@ DeleteAwsNodePoolRequest, GenerateAwsAccessTokenRequest, GenerateAwsAccessTokenResponse, + GenerateAwsClusterAgentTokenRequest, + GenerateAwsClusterAgentTokenResponse, GetAwsClusterRequest, + GetAwsJsonWebKeysRequest, GetAwsNodePoolRequest, + GetAwsOpenIdConfigRequest, GetAwsServerConfigRequest, 
ListAwsClustersRequest, ListAwsClustersResponse, ListAwsNodePoolsRequest, ListAwsNodePoolsResponse, + RollbackAwsNodePoolUpdateRequest, UpdateAwsClusterRequest, UpdateAwsNodePoolRequest, ) @@ -89,6 +106,7 @@ AzureClient, AzureCluster, AzureClusterError, + AzureClusterGroup, AzureClusterNetworking, AzureClusterResources, AzureClusterUser, @@ -96,11 +114,14 @@ AzureControlPlane, AzureDatabaseEncryption, AzureDiskTemplate, + AzureJsonWebKeys, AzureK8sVersionInfo, AzureNodeConfig, + AzureNodeManagement, AzureNodePool, AzureNodePoolAutoscaling, AzureNodePoolError, + AzureOpenIdConfig, AzureProxyConfig, AzureServerConfig, AzureServicesAuthentication, @@ -116,9 +137,13 @@ DeleteAzureNodePoolRequest, GenerateAzureAccessTokenRequest, GenerateAzureAccessTokenResponse, + GenerateAzureClusterAgentTokenRequest, + GenerateAzureClusterAgentTokenResponse, GetAzureClientRequest, GetAzureClusterRequest, + GetAzureJsonWebKeysRequest, GetAzureNodePoolRequest, + GetAzureOpenIdConfigRequest, GetAzureServerConfigRequest, ListAzureClientsRequest, ListAzureClientsResponse, @@ -130,7 +155,9 @@ UpdateAzureNodePoolRequest, ) from .types.common_resources import ( + BinaryAuthorization, Fleet, + Jwk, LoggingComponentConfig, LoggingConfig, ManagedPrometheusConfig, @@ -147,16 +174,19 @@ "AzureClustersAsyncClient", "AttachedCluster", "AttachedClusterError", + "AttachedClusterGroup", "AttachedClusterUser", "AttachedClustersAuthorization", "AttachedClustersClient", "AttachedOidcConfig", "AttachedPlatformVersionInfo", + "AttachedProxyConfig", "AttachedServerConfig", "AwsAuthorization", "AwsAutoscalingGroupMetricsCollection", "AwsCluster", "AwsClusterError", + "AwsClusterGroup", "AwsClusterNetworking", "AwsClusterUser", "AwsClustersClient", @@ -164,11 +194,14 @@ "AwsControlPlane", "AwsDatabaseEncryption", "AwsInstancePlacement", + "AwsJsonWebKeys", "AwsK8sVersionInfo", "AwsNodeConfig", + "AwsNodeManagement", "AwsNodePool", "AwsNodePoolAutoscaling", "AwsNodePoolError", + "AwsOpenIdConfig", 
"AwsProxyConfig", "AwsServerConfig", "AwsServicesAuthentication", @@ -178,6 +211,7 @@ "AzureClient", "AzureCluster", "AzureClusterError", + "AzureClusterGroup", "AzureClusterNetworking", "AzureClusterResources", "AzureClusterUser", @@ -186,15 +220,19 @@ "AzureControlPlane", "AzureDatabaseEncryption", "AzureDiskTemplate", + "AzureJsonWebKeys", "AzureK8sVersionInfo", "AzureNodeConfig", + "AzureNodeManagement", "AzureNodePool", "AzureNodePoolAutoscaling", "AzureNodePoolError", + "AzureOpenIdConfig", "AzureProxyConfig", "AzureServerConfig", "AzureServicesAuthentication", "AzureSshConfig", + "BinaryAuthorization", "CreateAttachedClusterRequest", "CreateAwsClusterRequest", "CreateAwsNodePoolRequest", @@ -208,22 +246,34 @@ "DeleteAzureClusterRequest", "DeleteAzureNodePoolRequest", "Fleet", + "GenerateAttachedClusterAgentTokenRequest", + "GenerateAttachedClusterAgentTokenResponse", "GenerateAttachedClusterInstallManifestRequest", "GenerateAttachedClusterInstallManifestResponse", "GenerateAwsAccessTokenRequest", "GenerateAwsAccessTokenResponse", + "GenerateAwsClusterAgentTokenRequest", + "GenerateAwsClusterAgentTokenResponse", "GenerateAzureAccessTokenRequest", "GenerateAzureAccessTokenResponse", + "GenerateAzureClusterAgentTokenRequest", + "GenerateAzureClusterAgentTokenResponse", "GetAttachedClusterRequest", "GetAttachedServerConfigRequest", "GetAwsClusterRequest", + "GetAwsJsonWebKeysRequest", "GetAwsNodePoolRequest", + "GetAwsOpenIdConfigRequest", "GetAwsServerConfigRequest", "GetAzureClientRequest", "GetAzureClusterRequest", + "GetAzureJsonWebKeysRequest", "GetAzureNodePoolRequest", + "GetAzureOpenIdConfigRequest", "GetAzureServerConfigRequest", "ImportAttachedClusterRequest", + "Jwk", + "KubernetesSecret", "ListAttachedClustersRequest", "ListAttachedClustersResponse", "ListAwsClustersRequest", @@ -244,10 +294,14 @@ "NodeTaint", "OperationMetadata", "ReplicaPlacement", + "RollbackAwsNodePoolUpdateRequest", + "SpotConfig", + "SurgeSettings", 
"UpdateAttachedClusterRequest", "UpdateAwsClusterRequest", "UpdateAwsNodePoolRequest", "UpdateAzureClusterRequest", "UpdateAzureNodePoolRequest", + "UpdateSettings", "WorkloadIdentityConfig", ) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_metadata.json b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_metadata.json index c62102472757..544cf3260044 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_metadata.json +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_metadata.json @@ -20,6 +20,11 @@ "delete_attached_cluster" ] }, + "GenerateAttachedClusterAgentToken": { + "methods": [ + "generate_attached_cluster_agent_token" + ] + }, "GenerateAttachedClusterInstallManifest": { "methods": [ "generate_attached_cluster_install_manifest" @@ -65,6 +70,11 @@ "delete_attached_cluster" ] }, + "GenerateAttachedClusterAgentToken": { + "methods": [ + "generate_attached_cluster_agent_token" + ] + }, "GenerateAttachedClusterInstallManifest": { "methods": [ "generate_attached_cluster_install_manifest" @@ -110,6 +120,11 @@ "delete_attached_cluster" ] }, + "GenerateAttachedClusterAgentToken": { + "methods": [ + "generate_attached_cluster_agent_token" + ] + }, "GenerateAttachedClusterInstallManifest": { "methods": [ "generate_attached_cluster_install_manifest" @@ -174,16 +189,31 @@ "generate_aws_access_token" ] }, + "GenerateAwsClusterAgentToken": { + "methods": [ + "generate_aws_cluster_agent_token" + ] + }, "GetAwsCluster": { "methods": [ "get_aws_cluster" ] }, + "GetAwsJsonWebKeys": { + "methods": [ + "get_aws_json_web_keys" + ] + }, "GetAwsNodePool": { "methods": [ "get_aws_node_pool" ] }, + "GetAwsOpenIdConfig": { + "methods": [ + "get_aws_open_id_config" + ] + }, "GetAwsServerConfig": { "methods": [ "get_aws_server_config" @@ -199,6 +229,11 @@ "list_aws_node_pools" ] }, + "RollbackAwsNodePoolUpdate": { + "methods": [ + "rollback_aws_node_pool_update" 
+ ] + }, "UpdateAwsCluster": { "methods": [ "update_aws_cluster" @@ -239,16 +274,31 @@ "generate_aws_access_token" ] }, + "GenerateAwsClusterAgentToken": { + "methods": [ + "generate_aws_cluster_agent_token" + ] + }, "GetAwsCluster": { "methods": [ "get_aws_cluster" ] }, + "GetAwsJsonWebKeys": { + "methods": [ + "get_aws_json_web_keys" + ] + }, "GetAwsNodePool": { "methods": [ "get_aws_node_pool" ] }, + "GetAwsOpenIdConfig": { + "methods": [ + "get_aws_open_id_config" + ] + }, "GetAwsServerConfig": { "methods": [ "get_aws_server_config" @@ -264,6 +314,11 @@ "list_aws_node_pools" ] }, + "RollbackAwsNodePoolUpdate": { + "methods": [ + "rollback_aws_node_pool_update" + ] + }, "UpdateAwsCluster": { "methods": [ "update_aws_cluster" @@ -304,16 +359,31 @@ "generate_aws_access_token" ] }, + "GenerateAwsClusterAgentToken": { + "methods": [ + "generate_aws_cluster_agent_token" + ] + }, "GetAwsCluster": { "methods": [ "get_aws_cluster" ] }, + "GetAwsJsonWebKeys": { + "methods": [ + "get_aws_json_web_keys" + ] + }, "GetAwsNodePool": { "methods": [ "get_aws_node_pool" ] }, + "GetAwsOpenIdConfig": { + "methods": [ + "get_aws_open_id_config" + ] + }, "GetAwsServerConfig": { "methods": [ "get_aws_server_config" @@ -329,6 +399,11 @@ "list_aws_node_pools" ] }, + "RollbackAwsNodePoolUpdate": { + "methods": [ + "rollback_aws_node_pool_update" + ] + }, "UpdateAwsCluster": { "methods": [ "update_aws_cluster" @@ -383,6 +458,11 @@ "generate_azure_access_token" ] }, + "GenerateAzureClusterAgentToken": { + "methods": [ + "generate_azure_cluster_agent_token" + ] + }, "GetAzureClient": { "methods": [ "get_azure_client" @@ -393,11 +473,21 @@ "get_azure_cluster" ] }, + "GetAzureJsonWebKeys": { + "methods": [ + "get_azure_json_web_keys" + ] + }, "GetAzureNodePool": { "methods": [ "get_azure_node_pool" ] }, + "GetAzureOpenIdConfig": { + "methods": [ + "get_azure_open_id_config" + ] + }, "GetAzureServerConfig": { "methods": [ "get_azure_server_config" @@ -468,6 +558,11 @@ 
"generate_azure_access_token" ] }, + "GenerateAzureClusterAgentToken": { + "methods": [ + "generate_azure_cluster_agent_token" + ] + }, "GetAzureClient": { "methods": [ "get_azure_client" @@ -478,11 +573,21 @@ "get_azure_cluster" ] }, + "GetAzureJsonWebKeys": { + "methods": [ + "get_azure_json_web_keys" + ] + }, "GetAzureNodePool": { "methods": [ "get_azure_node_pool" ] }, + "GetAzureOpenIdConfig": { + "methods": [ + "get_azure_open_id_config" + ] + }, "GetAzureServerConfig": { "methods": [ "get_azure_server_config" @@ -553,6 +658,11 @@ "generate_azure_access_token" ] }, + "GenerateAzureClusterAgentToken": { + "methods": [ + "generate_azure_cluster_agent_token" + ] + }, "GetAzureClient": { "methods": [ "get_azure_client" @@ -563,11 +673,21 @@ "get_azure_cluster" ] }, + "GetAzureJsonWebKeys": { + "methods": [ + "get_azure_json_web_keys" + ] + }, "GetAzureNodePool": { "methods": [ "get_azure_node_pool" ] }, + "GetAzureOpenIdConfig": { + "methods": [ + "get_azure_open_id_config" + ] + }, "GetAzureServerConfig": { "methods": [ "get_azure_server_config" diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py index 76bb756dc95d..7257802240c5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.4" # {x-release-please-version} +__version__ = "0.6.5" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py index 51847696b09b..b9e6f4935e84 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py @@ -474,12 +474,16 @@ async def sample_update_attached_cluster(): repeated paths field can only include these fields from [AttachedCluster][google.cloud.gkemulticloud.v1.AttachedCluster]: - - ``description``. - ``annotations``. - - ``platform_version``. + - ``authorization.admin_groups``. - ``authorization.admin_users``. + - ``binary_authorization.evaluation_mode``. + - ``description``. - ``logging_config.component_config.enable_components``. - ``monitoring_config.managed_prometheus_config.enabled``. + - ``platform_version``. + - ``proxy_config.kubernetes_secret.name``. + - ``proxy_config.kubernetes_secret.namespace``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1383,6 +1387,99 @@ async def sample_generate_attached_cluster_install_manifest(): # Done; return the response. return response + async def generate_attached_cluster_agent_token( + self, + request: Optional[ + Union[attached_service.GenerateAttachedClusterAgentTokenRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> attached_service.GenerateAttachedClusterAgentTokenResponse: + r"""Generates an access token for a cluster agent. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_generate_attached_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AttachedClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAttachedClusterAgentTokenRequest( + attached_cluster="attached_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = await client.generate_attached_cluster_agent_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenRequest, dict]]): + The request object. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenResponse: + + """ + # Create or coerce a protobuf request object. + request = attached_service.GenerateAttachedClusterAgentTokenRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_attached_cluster_agent_token, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("attached_cluster", request.attached_cluster),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py index 8d75c530166e..196e89f0fb1a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py @@ -718,12 +718,16 @@ def sample_update_attached_cluster(): repeated paths field can only include these fields from [AttachedCluster][google.cloud.gkemulticloud.v1.AttachedCluster]: - - ``description``. - ``annotations``. - - ``platform_version``. + - ``authorization.admin_groups``. - ``authorization.admin_users``. + - ``binary_authorization.evaluation_mode``. + - ``description``. - ``logging_config.component_config.enable_components``. - ``monitoring_config.managed_prometheus_config.enabled``. + - ``platform_version``. + - ``proxy_config.kubernetes_secret.name``. + - ``proxy_config.kubernetes_secret.namespace``. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1597,6 +1601,95 @@ def sample_generate_attached_cluster_install_manifest(): # Done; return the response. return response + def generate_attached_cluster_agent_token( + self, + request: Optional[ + Union[attached_service.GenerateAttachedClusterAgentTokenRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> attached_service.GenerateAttachedClusterAgentTokenResponse: + r"""Generates an access token for a cluster agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_generate_attached_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AttachedClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAttachedClusterAgentTokenRequest( + attached_cluster="attached_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = client.generate_attached_cluster_agent_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenRequest, dict]): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a attached_service.GenerateAttachedClusterAgentTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, attached_service.GenerateAttachedClusterAgentTokenRequest + ): + request = attached_service.GenerateAttachedClusterAgentTokenRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.generate_attached_cluster_agent_token + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("attached_cluster", request.attached_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AttachedClustersClient": return self diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/base.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/base.py index 91115198a9a6..45569b4e371e 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/base.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/base.py @@ -199,6 +199,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.generate_attached_cluster_agent_token: gapic_v1.method.wrap_method( + self.generate_attached_cluster_agent_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), } def close(self): @@ -299,6 +313,18 @@ def generate_attached_cluster_install_manifest( ]: raise NotImplementedError() + @property + def generate_attached_cluster_agent_token( + self, + ) -> Callable[ + [attached_service.GenerateAttachedClusterAgentTokenRequest], + Union[ + attached_service.GenerateAttachedClusterAgentTokenResponse, + Awaitable[attached_service.GenerateAttachedClusterAgentTokenResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc.py index 0c289bed9206..3b902a0db0fd 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc.py +++ 
b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc.py @@ -504,6 +504,38 @@ def generate_attached_cluster_install_manifest( ) return self._stubs["generate_attached_cluster_install_manifest"] + @property + def generate_attached_cluster_agent_token( + self, + ) -> Callable[ + [attached_service.GenerateAttachedClusterAgentTokenRequest], + attached_service.GenerateAttachedClusterAgentTokenResponse, + ]: + r"""Return a callable for the generate attached cluster + agent token method over gRPC. + + Generates an access token for a cluster agent. + + Returns: + Callable[[~.GenerateAttachedClusterAgentTokenRequest], + ~.GenerateAttachedClusterAgentTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_attached_cluster_agent_token" not in self._stubs: + self._stubs[ + "generate_attached_cluster_agent_token" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AttachedClusters/GenerateAttachedClusterAgentToken", + request_serializer=attached_service.GenerateAttachedClusterAgentTokenRequest.serialize, + response_deserializer=attached_service.GenerateAttachedClusterAgentTokenResponse.deserialize, + ) + return self._stubs["generate_attached_cluster_agent_token"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc_asyncio.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc_asyncio.py index 70ec494c1b29..c161cd10419e 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc_asyncio.py +++ 
b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/grpc_asyncio.py @@ -514,6 +514,38 @@ def generate_attached_cluster_install_manifest( ) return self._stubs["generate_attached_cluster_install_manifest"] + @property + def generate_attached_cluster_agent_token( + self, + ) -> Callable[ + [attached_service.GenerateAttachedClusterAgentTokenRequest], + Awaitable[attached_service.GenerateAttachedClusterAgentTokenResponse], + ]: + r"""Return a callable for the generate attached cluster + agent token method over gRPC. + + Generates an access token for a cluster agent. + + Returns: + Callable[[~.GenerateAttachedClusterAgentTokenRequest], + Awaitable[~.GenerateAttachedClusterAgentTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_attached_cluster_agent_token" not in self._stubs: + self._stubs[ + "generate_attached_cluster_agent_token" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AttachedClusters/GenerateAttachedClusterAgentToken", + request_serializer=attached_service.GenerateAttachedClusterAgentTokenRequest.serialize, + response_deserializer=attached_service.GenerateAttachedClusterAgentTokenResponse.deserialize, + ) + return self._stubs["generate_attached_cluster_agent_token"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py index bb142347f3b8..1a217e507553 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py @@ -87,6 +87,14 @@ def post_delete_attached_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_generate_attached_cluster_agent_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_attached_cluster_agent_token(self, response): + logging.log(f"Received response: {response}") + return response + def pre_generate_attached_cluster_install_manifest(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -191,6 +199,32 @@ def post_delete_attached_cluster( """ return response + def pre_generate_attached_cluster_agent_token( + self, + request: attached_service.GenerateAttachedClusterAgentTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + attached_service.GenerateAttachedClusterAgentTokenRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc 
interceptor for generate_attached_cluster_agent_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the AttachedClusters server. + """ + return request, metadata + + def post_generate_attached_cluster_agent_token( + self, response: attached_service.GenerateAttachedClusterAgentTokenResponse + ) -> attached_service.GenerateAttachedClusterAgentTokenResponse: + """Post-rpc interceptor for generate_attached_cluster_agent_token + + Override in a subclass to manipulate the response + after it is returned by the AttachedClusters server but before + it is returned to user code. + """ + return response + def pre_generate_attached_cluster_install_manifest( self, request: attached_service.GenerateAttachedClusterInstallManifestRequest, @@ -773,6 +807,109 @@ def __call__( resp = self._interceptor.post_delete_attached_cluster(resp) return resp + class _GenerateAttachedClusterAgentToken(AttachedClustersRestStub): + def __hash__(self): + return hash("GenerateAttachedClusterAgentToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: attached_service.GenerateAttachedClusterAgentTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> attached_service.GenerateAttachedClusterAgentTokenResponse: + r"""Call the generate attached cluster + agent token method over HTTP. + + Args: + request (~.attached_service.GenerateAttachedClusterAgentTokenRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.attached_service.GenerateAttachedClusterAgentTokenResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{attached_cluster=projects/*/locations/*/attachedClusters/*}:generateAttachedClusterAgentToken", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_generate_attached_cluster_agent_token( + request, metadata + ) + pb_request = attached_service.GenerateAttachedClusterAgentTokenRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = attached_service.GenerateAttachedClusterAgentTokenResponse() + pb_resp = attached_service.GenerateAttachedClusterAgentTokenResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_attached_cluster_agent_token(resp) + return resp + class _GenerateAttachedClusterInstallManifest(AttachedClustersRestStub): def __hash__(self): return hash("GenerateAttachedClusterInstallManifest") @@ -1364,6 +1501,17 @@ def delete_attached_cluster( # In C++ this would require a dynamic_cast return self._DeleteAttachedCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def generate_attached_cluster_agent_token( + self, + ) -> Callable[ + [attached_service.GenerateAttachedClusterAgentTokenRequest], + attached_service.GenerateAttachedClusterAgentTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GenerateAttachedClusterAgentToken(self._session, self._host, self._interceptor) # type: ignore + @property def generate_attached_cluster_install_manifest( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py index 5af2fe0337ce..e55017fed19a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py @@ -276,7 +276,6 @@ async def sample_create_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAwsClusterRequest( @@ -446,7 +445,6 @@ async def sample_update_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAwsClusterRequest( @@ -485,6 +483,8 @@ async def sample_update_aws_cluster(): - ``annotations``. - ``control_plane.version``. - ``authorization.admin_users``. + - ``authorization.admin_groups``. + - ``binary_authorization.evaluation_mode``. - ``control_plane.aws_services_authentication.role_arn``. - ``control_plane.aws_services_authentication.role_session_name``. 
- ``control_plane.config_encryption.kms_key_arn``. @@ -496,6 +496,7 @@ async def sample_update_aws_cluster(): - ``control_plane.root_volume.size_gib``. - ``control_plane.root_volume.volume_type``. - ``control_plane.root_volume.iops``. + - ``control_plane.root_volume.throughput``. - ``control_plane.root_volume.kms_key_arn``. - ``control_plane.ssh_config``. - ``control_plane.ssh_config.ec2_key_pair``. @@ -504,6 +505,7 @@ async def sample_update_aws_cluster(): - ``logging_config.component_config.enable_components``. - ``control_plane.tags``. - ``monitoring_config.managed_prometheus_config.enabled``. + - ``networking.per_node_pool_sg_rules_disabled``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -972,6 +974,99 @@ async def sample_delete_aws_cluster(): # Done; return the response. return response + async def generate_aws_cluster_agent_token( + self, + request: Optional[ + Union[aws_service.GenerateAwsClusterAgentTokenRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_service.GenerateAwsClusterAgentTokenResponse: + r"""Generates an access token for a cluster agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_generate_aws_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAwsClusterAgentTokenRequest( + aws_cluster="aws_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = await client.generate_aws_cluster_agent_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenRequest, dict]]): + The request object. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenResponse: + + """ + # Create or coerce a protobuf request object. + request = aws_service.GenerateAwsClusterAgentTokenRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_aws_cluster_agent_token, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("aws_cluster", request.aws_cluster),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def generate_aws_access_token( self, request: Optional[ @@ -1315,6 +1410,7 @@ async def sample_update_aws_node_pool(): - ``config.config_encryption.kms_key_arn``. - ``config.security_group_ids``. - ``config.root_volume.iops``. + - ``config.root_volume.throughput``. - ``config.root_volume.kms_key_arn``. - ``config.root_volume.volume_type``. - ``config.root_volume.size_gib``. @@ -1330,6 +1426,13 @@ async def sample_update_aws_node_pool(): - ``config.autoscaling_metrics_collection``. - ``config.autoscaling_metrics_collection.granularity``. - ``config.autoscaling_metrics_collection.metrics``. + - ``config.instance_type``. + - ``management.auto_repair``. + - ``management``. + - ``update_settings``. + - ``update_settings.surge_settings``. + - ``update_settings.surge_settings.max_surge``. + - ``update_settings.surge_settings.max_unavailable``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1403,6 +1506,138 @@ async def sample_update_aws_node_pool(): # Done; return the response. return response + async def rollback_aws_node_pool_update( + self, + request: Optional[ + Union[aws_service.RollbackAwsNodePoolUpdateRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Rolls back a previously aborted or failed + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] update + request. Makes no changes if the last update request + successfully finished. 
If an update request is in progress, you + cannot rollback the update. You must first cancel or let it + finish unsuccessfully before you can rollback. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_rollback_aws_node_pool_update(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.RollbackAwsNodePoolUpdateRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_aws_node_pool_update(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.RollbackAwsNodePoolUpdateRequest, dict]]): + The request object. Request message for + ``AwsClusters.RollbackAwsNodePoolUpdate`` method. + name (:class:`str`): + Required. The name of the + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] + resource to rollback. + + ``AwsNodePool`` names are formatted as + ``projects//locations//awsClusters//awsNodePools/``. + + See `Resource + Names `__ + for more details on Google Cloud resource names. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gke_multicloud_v1.types.AwsNodePool` + An Anthos node pool running on AWS. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = aws_service.RollbackAwsNodePoolUpdateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_aws_node_pool_update, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + aws_resources.AwsNodePool, + metadata_type=common_resources.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def get_aws_node_pool( self, request: Optional[Union[aws_service.GetAwsNodePoolRequest, dict]] = None, @@ -1796,6 +2031,199 @@ async def sample_delete_aws_node_pool(): # Done; return the response. return response + async def get_aws_open_id_config( + self, + request: Optional[Union[aws_service.GetAwsOpenIdConfigRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_resources.AwsOpenIdConfig: + r"""Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_get_aws_open_id_config(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsOpenIdConfigRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = await client.get_aws_open_id_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GetAwsOpenIdConfigRequest, dict]]): + The request object. GetAwsOpenIdConfigRequest gets the + OIDC discovery document for the cluster. + See the OpenID Connect Discovery 1.0 + specification for details. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AwsOpenIdConfig: + AwsOpenIdConfig is an OIDC discovery + document for the cluster. See the OpenID + Connect Discovery 1.0 specification for + details. + + """ + # Create or coerce a protobuf request object. + request = aws_service.GetAwsOpenIdConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_aws_open_id_config, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("aws_cluster", request.aws_cluster),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_aws_json_web_keys( + self, + request: Optional[Union[aws_service.GetAwsJsonWebKeysRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_resources.AwsJsonWebKeys: + r"""Gets the public component of the cluster signing keys + in JSON Web Key format. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_get_aws_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsJsonWebKeysRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = await client.get_aws_json_web_keys(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GetAwsJsonWebKeysRequest, dict]]): + The request object. GetAwsJsonWebKeysRequest gets the public component of + the keys used by the cluster to sign token requests. + This will be the jwks_uri for the discover document + returned by getOpenIDConfig. See the OpenID Connect + Discovery 1.0 specification for details. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AwsJsonWebKeys: + AwsJsonWebKeys is a valid JSON Web + Key Set as specififed in RFC 7517. + + """ + # Create or coerce a protobuf request object. + request = aws_service.GetAwsJsonWebKeysRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_aws_json_web_keys, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("aws_cluster", request.aws_cluster),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_aws_server_config( self, request: Optional[Union[aws_service.GetAwsServerConfigRequest, dict]] = None, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py index 5b5085c94aca..cd295e6a5940 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py @@ -550,7 +550,6 @@ def sample_create_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAwsClusterRequest( @@ -720,7 +719,6 @@ def sample_update_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" 
aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAwsClusterRequest( @@ -759,6 +757,8 @@ def sample_update_aws_cluster(): - ``annotations``. - ``control_plane.version``. - ``authorization.admin_users``. + - ``authorization.admin_groups``. + - ``binary_authorization.evaluation_mode``. - ``control_plane.aws_services_authentication.role_arn``. - ``control_plane.aws_services_authentication.role_session_name``. - ``control_plane.config_encryption.kms_key_arn``. @@ -770,6 +770,7 @@ def sample_update_aws_cluster(): - ``control_plane.root_volume.size_gib``. - ``control_plane.root_volume.volume_type``. - ``control_plane.root_volume.iops``. + - ``control_plane.root_volume.throughput``. - ``control_plane.root_volume.kms_key_arn``. - ``control_plane.ssh_config``. - ``control_plane.ssh_config.ec2_key_pair``. @@ -778,6 +779,7 @@ def sample_update_aws_cluster(): - ``logging_config.component_config.enable_components``. - ``control_plane.tags``. - ``monitoring_config.managed_prometheus_config.enabled``. + - ``networking.per_node_pool_sg_rules_disabled``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1228,6 +1230,93 @@ def sample_delete_aws_cluster(): # Done; return the response. return response + def generate_aws_cluster_agent_token( + self, + request: Optional[ + Union[aws_service.GenerateAwsClusterAgentTokenRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_service.GenerateAwsClusterAgentTokenResponse: + r"""Generates an access token for a cluster agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_generate_aws_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAwsClusterAgentTokenRequest( + aws_cluster="aws_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = client.generate_aws_cluster_agent_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenRequest, dict]): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a aws_service.GenerateAwsClusterAgentTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, aws_service.GenerateAwsClusterAgentTokenRequest): + request = aws_service.GenerateAwsClusterAgentTokenRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.generate_aws_cluster_agent_token + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("aws_cluster", request.aws_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def generate_aws_access_token( self, request: Optional[ @@ -1565,6 +1654,7 @@ def sample_update_aws_node_pool(): - ``config.config_encryption.kms_key_arn``. - ``config.security_group_ids``. - ``config.root_volume.iops``. + - ``config.root_volume.throughput``. - ``config.root_volume.kms_key_arn``. - ``config.root_volume.volume_type``. - ``config.root_volume.size_gib``. @@ -1580,6 +1670,13 @@ def sample_update_aws_node_pool(): - ``config.autoscaling_metrics_collection``. - ``config.autoscaling_metrics_collection.granularity``. - ``config.autoscaling_metrics_collection.metrics``. + - ``config.instance_type``. + - ``management.auto_repair``. + - ``management``. + - ``update_settings``. + - ``update_settings.surge_settings``. + - ``update_settings.surge_settings.max_surge``. + - ``update_settings.surge_settings.max_unavailable``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1653,6 +1750,140 @@ def sample_update_aws_node_pool(): # Done; return the response. 
return response + def rollback_aws_node_pool_update( + self, + request: Optional[ + Union[aws_service.RollbackAwsNodePoolUpdateRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Rolls back a previously aborted or failed + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] update + request. Makes no changes if the last update request + successfully finished. If an update request is in progress, you + cannot rollback the update. You must first cancel or let it + finish unsuccessfully before you can rollback. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_rollback_aws_node_pool_update(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.RollbackAwsNodePoolUpdateRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_aws_node_pool_update(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.RollbackAwsNodePoolUpdateRequest, dict]): + The request object. Request message for + ``AwsClusters.RollbackAwsNodePoolUpdate`` method. + name (str): + Required. The name of the + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] + resource to rollback. 
+ + ``AwsNodePool`` names are formatted as + ``projects//locations//awsClusters//awsNodePools/``. + + See `Resource + Names `__ + for more details on Google Cloud resource names. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.gke_multicloud_v1.types.AwsNodePool` + An Anthos node pool running on AWS. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a aws_service.RollbackAwsNodePoolUpdateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, aws_service.RollbackAwsNodePoolUpdateRequest): + request = aws_service.RollbackAwsNodePoolUpdateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.rollback_aws_node_pool_update + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + aws_resources.AwsNodePool, + metadata_type=common_resources.OperationMetadata, + ) + + # Done; return the response. + return response + def get_aws_node_pool( self, request: Optional[Union[aws_service.GetAwsNodePoolRequest, dict]] = None, @@ -2028,6 +2259,183 @@ def sample_delete_aws_node_pool(): # Done; return the response. return response + def get_aws_open_id_config( + self, + request: Optional[Union[aws_service.GetAwsOpenIdConfigRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_resources.AwsOpenIdConfig: + r"""Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_get_aws_open_id_config(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsOpenIdConfigRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = client.get_aws_open_id_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GetAwsOpenIdConfigRequest, dict]): + The request object. GetAwsOpenIdConfigRequest gets the + OIDC discovery document for the cluster. + See the OpenID Connect Discovery 1.0 + specification for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AwsOpenIdConfig: + AwsOpenIdConfig is an OIDC discovery + document for the cluster. See the OpenID + Connect Discovery 1.0 specification for + details. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a aws_service.GetAwsOpenIdConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, aws_service.GetAwsOpenIdConfigRequest): + request = aws_service.GetAwsOpenIdConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_aws_open_id_config] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("aws_cluster", request.aws_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_aws_json_web_keys( + self, + request: Optional[Union[aws_service.GetAwsJsonWebKeysRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_resources.AwsJsonWebKeys: + r"""Gets the public component of the cluster signing keys + in JSON Web Key format. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_get_aws_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsJsonWebKeysRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = client.get_aws_json_web_keys(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GetAwsJsonWebKeysRequest, dict]): + The request object. GetAwsJsonWebKeysRequest gets the public component of + the keys used by the cluster to sign token requests. + This will be the jwks_uri for the discover document + returned by getOpenIDConfig. See the OpenID Connect + Discovery 1.0 specification for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AwsJsonWebKeys: + AwsJsonWebKeys is a valid JSON Web + Key Set as specififed in RFC 7517. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a aws_service.GetAwsJsonWebKeysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, aws_service.GetAwsJsonWebKeysRequest): + request = aws_service.GetAwsJsonWebKeysRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_aws_json_web_keys] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("aws_cluster", request.aws_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def get_aws_server_config( self, request: Optional[Union[aws_service.GetAwsServerConfigRequest, dict]] = None, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/base.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/base.py index 3a6d400b4064..a33e5ac619d1 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/base.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/base.py @@ -166,6 +166,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.generate_aws_cluster_agent_token: gapic_v1.method.wrap_method( + self.generate_aws_cluster_agent_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.generate_aws_access_token: gapic_v1.method.wrap_method( self.generate_aws_access_token, default_retry=retries.Retry( @@ -190,6 +204,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.rollback_aws_node_pool_update: gapic_v1.method.wrap_method( + self.rollback_aws_node_pool_update, + default_timeout=60.0, + client_info=client_info, + ), self.get_aws_node_pool: gapic_v1.method.wrap_method( self.get_aws_node_pool, default_retry=retries.Retry( @@ -223,6 +242,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_aws_open_id_config: gapic_v1.method.wrap_method( + self.get_aws_open_id_config, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + 
), + default_timeout=60.0, + client_info=client_info, + ), + self.get_aws_json_web_keys: gapic_v1.method.wrap_method( + self.get_aws_json_web_keys, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_aws_server_config: gapic_v1.method.wrap_method( self.get_aws_server_config, default_retry=retries.Retry( @@ -301,6 +348,18 @@ def delete_aws_cluster( ]: raise NotImplementedError() + @property + def generate_aws_cluster_agent_token( + self, + ) -> Callable[ + [aws_service.GenerateAwsClusterAgentTokenRequest], + Union[ + aws_service.GenerateAwsClusterAgentTokenResponse, + Awaitable[aws_service.GenerateAwsClusterAgentTokenResponse], + ], + ]: + raise NotImplementedError() + @property def generate_aws_access_token( self, @@ -331,6 +390,15 @@ def update_aws_node_pool( ]: raise NotImplementedError() + @property + def rollback_aws_node_pool_update( + self, + ) -> Callable[ + [aws_service.RollbackAwsNodePoolUpdateRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_aws_node_pool( self, @@ -361,6 +429,24 @@ def delete_aws_node_pool( ]: raise NotImplementedError() + @property + def get_aws_open_id_config( + self, + ) -> Callable[ + [aws_service.GetAwsOpenIdConfigRequest], + Union[aws_resources.AwsOpenIdConfig, Awaitable[aws_resources.AwsOpenIdConfig]], + ]: + raise NotImplementedError() + + @property + def get_aws_json_web_keys( + self, + ) -> Callable[ + [aws_service.GetAwsJsonWebKeysRequest], + Union[aws_resources.AwsJsonWebKeys, Awaitable[aws_resources.AwsJsonWebKeys]], + ]: + raise NotImplementedError() + @property def get_aws_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc.py 
b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc.py index 8fd23924ac1b..e4d03771edd3 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc.py @@ -396,6 +396,38 @@ def delete_aws_cluster( ) return self._stubs["delete_aws_cluster"] + @property + def generate_aws_cluster_agent_token( + self, + ) -> Callable[ + [aws_service.GenerateAwsClusterAgentTokenRequest], + aws_service.GenerateAwsClusterAgentTokenResponse, + ]: + r"""Return a callable for the generate aws cluster agent + token method over gRPC. + + Generates an access token for a cluster agent. + + Returns: + Callable[[~.GenerateAwsClusterAgentTokenRequest], + ~.GenerateAwsClusterAgentTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_aws_cluster_agent_token" not in self._stubs: + self._stubs[ + "generate_aws_cluster_agent_token" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/GenerateAwsClusterAgentToken", + request_serializer=aws_service.GenerateAwsClusterAgentTokenRequest.serialize, + response_deserializer=aws_service.GenerateAwsClusterAgentTokenResponse.deserialize, + ) + return self._stubs["generate_aws_cluster_agent_token"] + @property def generate_aws_access_token( self, @@ -486,6 +518,41 @@ def update_aws_node_pool( ) return self._stubs["update_aws_node_pool"] + @property + def rollback_aws_node_pool_update( + self, + ) -> Callable[ + [aws_service.RollbackAwsNodePoolUpdateRequest], operations_pb2.Operation + ]: + r"""Return a callable for the rollback aws node pool update method over gRPC. + + Rolls back a previously aborted or failed + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] update + request. Makes no changes if the last update request + successfully finished. If an update request is in progress, you + cannot rollback the update. You must first cancel or let it + finish unsuccessfully before you can rollback. + + Returns: + Callable[[~.RollbackAwsNodePoolUpdateRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "rollback_aws_node_pool_update" not in self._stubs: + self._stubs[ + "rollback_aws_node_pool_update" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/RollbackAwsNodePoolUpdate", + request_serializer=aws_service.RollbackAwsNodePoolUpdateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_aws_node_pool_update"] + @property def get_aws_node_pool( self, @@ -577,6 +644,64 @@ def delete_aws_node_pool( ) return self._stubs["delete_aws_node_pool"] + @property + def get_aws_open_id_config( + self, + ) -> Callable[ + [aws_service.GetAwsOpenIdConfigRequest], aws_resources.AwsOpenIdConfig + ]: + r"""Return a callable for the get aws open id config method over gRPC. + + Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + Returns: + Callable[[~.GetAwsOpenIdConfigRequest], + ~.AwsOpenIdConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_aws_open_id_config" not in self._stubs: + self._stubs["get_aws_open_id_config"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/GetAwsOpenIdConfig", + request_serializer=aws_service.GetAwsOpenIdConfigRequest.serialize, + response_deserializer=aws_resources.AwsOpenIdConfig.deserialize, + ) + return self._stubs["get_aws_open_id_config"] + + @property + def get_aws_json_web_keys( + self, + ) -> Callable[[aws_service.GetAwsJsonWebKeysRequest], aws_resources.AwsJsonWebKeys]: + r"""Return a callable for the get aws json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. 
+ + Returns: + Callable[[~.GetAwsJsonWebKeysRequest], + ~.AwsJsonWebKeys]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_aws_json_web_keys" not in self._stubs: + self._stubs["get_aws_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/GetAwsJsonWebKeys", + request_serializer=aws_service.GetAwsJsonWebKeysRequest.serialize, + response_deserializer=aws_resources.AwsJsonWebKeys.deserialize, + ) + return self._stubs["get_aws_json_web_keys"] + @property def get_aws_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc_asyncio.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc_asyncio.py index bce4e42a48cd..ec7aaece46f2 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc_asyncio.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/grpc_asyncio.py @@ -410,6 +410,38 @@ def delete_aws_cluster( ) return self._stubs["delete_aws_cluster"] + @property + def generate_aws_cluster_agent_token( + self, + ) -> Callable[ + [aws_service.GenerateAwsClusterAgentTokenRequest], + Awaitable[aws_service.GenerateAwsClusterAgentTokenResponse], + ]: + r"""Return a callable for the generate aws cluster agent + token method over gRPC. + + Generates an access token for a cluster agent. + + Returns: + Callable[[~.GenerateAwsClusterAgentTokenRequest], + Awaitable[~.GenerateAwsClusterAgentTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_aws_cluster_agent_token" not in self._stubs: + self._stubs[ + "generate_aws_cluster_agent_token" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/GenerateAwsClusterAgentToken", + request_serializer=aws_service.GenerateAwsClusterAgentTokenRequest.serialize, + response_deserializer=aws_service.GenerateAwsClusterAgentTokenResponse.deserialize, + ) + return self._stubs["generate_aws_cluster_agent_token"] + @property def generate_aws_access_token( self, @@ -504,6 +536,42 @@ def update_aws_node_pool( ) return self._stubs["update_aws_node_pool"] + @property + def rollback_aws_node_pool_update( + self, + ) -> Callable[ + [aws_service.RollbackAwsNodePoolUpdateRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the rollback aws node pool update method over gRPC. + + Rolls back a previously aborted or failed + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] update + request. Makes no changes if the last update request + successfully finished. If an update request is in progress, you + cannot rollback the update. You must first cancel or let it + finish unsuccessfully before you can rollback. + + Returns: + Callable[[~.RollbackAwsNodePoolUpdateRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "rollback_aws_node_pool_update" not in self._stubs: + self._stubs[ + "rollback_aws_node_pool_update" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/RollbackAwsNodePoolUpdate", + request_serializer=aws_service.RollbackAwsNodePoolUpdateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_aws_node_pool_update"] + @property def get_aws_node_pool( self, @@ -600,6 +668,67 @@ def delete_aws_node_pool( ) return self._stubs["delete_aws_node_pool"] + @property + def get_aws_open_id_config( + self, + ) -> Callable[ + [aws_service.GetAwsOpenIdConfigRequest], + Awaitable[aws_resources.AwsOpenIdConfig], + ]: + r"""Return a callable for the get aws open id config method over gRPC. + + Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + Returns: + Callable[[~.GetAwsOpenIdConfigRequest], + Awaitable[~.AwsOpenIdConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_aws_open_id_config" not in self._stubs: + self._stubs["get_aws_open_id_config"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/GetAwsOpenIdConfig", + request_serializer=aws_service.GetAwsOpenIdConfigRequest.serialize, + response_deserializer=aws_resources.AwsOpenIdConfig.deserialize, + ) + return self._stubs["get_aws_open_id_config"] + + @property + def get_aws_json_web_keys( + self, + ) -> Callable[ + [aws_service.GetAwsJsonWebKeysRequest], Awaitable[aws_resources.AwsJsonWebKeys] + ]: + r"""Return a callable for the get aws json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. 
+ + Returns: + Callable[[~.GetAwsJsonWebKeysRequest], + Awaitable[~.AwsJsonWebKeys]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_aws_json_web_keys" not in self._stubs: + self._stubs["get_aws_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AwsClusters/GetAwsJsonWebKeys", + request_serializer=aws_service.GetAwsJsonWebKeysRequest.serialize, + response_deserializer=aws_resources.AwsJsonWebKeys.deserialize, + ) + return self._stubs["get_aws_json_web_keys"] + @property def get_aws_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py index b31a76ce79ff..ca74a01967d7 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py @@ -111,6 +111,14 @@ def post_generate_aws_access_token(self, response): logging.log(f"Received response: {response}") return response + def pre_generate_aws_cluster_agent_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_aws_cluster_agent_token(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_aws_cluster(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -119,6 +127,14 @@ def post_get_aws_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_get_aws_json_web_keys(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_aws_json_web_keys(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_aws_node_pool(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +143,14 @@ def post_get_aws_node_pool(self, response): logging.log(f"Received response: {response}") return response + def pre_get_aws_open_id_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_aws_open_id_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_aws_server_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -151,6 +175,14 @@ def post_list_aws_node_pools(self, response): logging.log(f"Received response: {response}") return response + def pre_rollback_aws_node_pool_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rollback_aws_node_pool_update(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_aws_cluster(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -288,6 +320,31 @@ def post_generate_aws_access_token( """ return response + def pre_generate_aws_cluster_agent_token( + self, + request: aws_service.GenerateAwsClusterAgentTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + aws_service.GenerateAwsClusterAgentTokenRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for generate_aws_cluster_agent_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the AwsClusters server. 
+ """ + return request, metadata + + def post_generate_aws_cluster_agent_token( + self, response: aws_service.GenerateAwsClusterAgentTokenResponse + ) -> aws_service.GenerateAwsClusterAgentTokenResponse: + """Post-rpc interceptor for generate_aws_cluster_agent_token + + Override in a subclass to manipulate the response + after it is returned by the AwsClusters server but before + it is returned to user code. + """ + return response + def pre_get_aws_cluster( self, request: aws_service.GetAwsClusterRequest, @@ -311,6 +368,29 @@ def post_get_aws_cluster( """ return response + def pre_get_aws_json_web_keys( + self, + request: aws_service.GetAwsJsonWebKeysRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[aws_service.GetAwsJsonWebKeysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_aws_json_web_keys + + Override in a subclass to manipulate the request or metadata + before they are sent to the AwsClusters server. + """ + return request, metadata + + def post_get_aws_json_web_keys( + self, response: aws_resources.AwsJsonWebKeys + ) -> aws_resources.AwsJsonWebKeys: + """Post-rpc interceptor for get_aws_json_web_keys + + Override in a subclass to manipulate the response + after it is returned by the AwsClusters server but before + it is returned to user code. + """ + return response + def pre_get_aws_node_pool( self, request: aws_service.GetAwsNodePoolRequest, @@ -334,6 +414,29 @@ def post_get_aws_node_pool( """ return response + def pre_get_aws_open_id_config( + self, + request: aws_service.GetAwsOpenIdConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[aws_service.GetAwsOpenIdConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_aws_open_id_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the AwsClusters server. 
+ """ + return request, metadata + + def post_get_aws_open_id_config( + self, response: aws_resources.AwsOpenIdConfig + ) -> aws_resources.AwsOpenIdConfig: + """Post-rpc interceptor for get_aws_open_id_config + + Override in a subclass to manipulate the response + after it is returned by the AwsClusters server but before + it is returned to user code. + """ + return response + def pre_get_aws_server_config( self, request: aws_service.GetAwsServerConfigRequest, @@ -403,6 +506,29 @@ def post_list_aws_node_pools( """ return response + def pre_rollback_aws_node_pool_update( + self, + request: aws_service.RollbackAwsNodePoolUpdateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[aws_service.RollbackAwsNodePoolUpdateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rollback_aws_node_pool_update + + Override in a subclass to manipulate the request or metadata + before they are sent to the AwsClusters server. + """ + return request, metadata + + def post_rollback_aws_node_pool_update( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for rollback_aws_node_pool_update + + Override in a subclass to manipulate the response + after it is returned by the AwsClusters server but before + it is returned to user code. 
+ """ + return response + def pre_update_aws_cluster( self, request: aws_service.UpdateAwsClusterRequest, @@ -1160,6 +1286,102 @@ def __call__( resp = self._interceptor.post_generate_aws_access_token(resp) return resp + class _GenerateAwsClusterAgentToken(AwsClustersRestStub): + def __hash__(self): + return hash("GenerateAwsClusterAgentToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: aws_service.GenerateAwsClusterAgentTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_service.GenerateAwsClusterAgentTokenResponse: + r"""Call the generate aws cluster + agent token method over HTTP. + + Args: + request (~.aws_service.GenerateAwsClusterAgentTokenRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.aws_service.GenerateAwsClusterAgentTokenResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{aws_cluster=projects/*/locations/*/awsClusters/*}:generateAwsClusterAgentToken", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_generate_aws_cluster_agent_token( + request, metadata + ) + pb_request = aws_service.GenerateAwsClusterAgentTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = aws_service.GenerateAwsClusterAgentTokenResponse() + pb_resp = aws_service.GenerateAwsClusterAgentTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_aws_cluster_agent_token(resp) + return resp + class _GetAwsCluster(AwsClustersRestStub): def __hash__(self): return hash("GetAwsCluster") @@ -1245,6 +1467,98 @@ def __call__( resp = self._interceptor.post_get_aws_cluster(resp) return resp + class _GetAwsJsonWebKeys(AwsClustersRestStub): + def __hash__(self): + return hash("GetAwsJsonWebKeys") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: aws_service.GetAwsJsonWebKeysRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_resources.AwsJsonWebKeys: + r"""Call the get aws json web keys method over HTTP. + + Args: + request (~.aws_service.GetAwsJsonWebKeysRequest): + The request object. GetAwsJsonWebKeysRequest gets the public component of + the keys used by the cluster to sign token requests. + This will be the jwks_uri for the discover document + returned by getOpenIDConfig. See the OpenID Connect + Discovery 1.0 specification for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.aws_resources.AwsJsonWebKeys: + AwsJsonWebKeys is a valid JSON Web + Key Set as specififed in RFC 7517. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{aws_cluster=projects/*/locations/*/awsClusters/*}/jwks", + }, + ] + request, metadata = self._interceptor.pre_get_aws_json_web_keys( + request, metadata + ) + pb_request = aws_service.GetAwsJsonWebKeysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = aws_resources.AwsJsonWebKeys() + pb_resp = aws_resources.AwsJsonWebKeys.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_aws_json_web_keys(resp) + return resp + class _GetAwsNodePool(AwsClustersRestStub): def __hash__(self): return hash("GetAwsNodePool") @@ -1332,6 +1646,99 @@ def __call__( resp = self._interceptor.post_get_aws_node_pool(resp) return resp + class _GetAwsOpenIdConfig(AwsClustersRestStub): + def __hash__(self): + return hash("GetAwsOpenIdConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: aws_service.GetAwsOpenIdConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> aws_resources.AwsOpenIdConfig: + r"""Call the get aws open id config method over HTTP. + + Args: + request (~.aws_service.GetAwsOpenIdConfigRequest): + The request object. GetAwsOpenIdConfigRequest gets the + OIDC discovery document for the cluster. + See the OpenID Connect Discovery 1.0 + specification for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.aws_resources.AwsOpenIdConfig: + AwsOpenIdConfig is an OIDC discovery + document for the cluster. See the OpenID + Connect Discovery 1.0 specification for + details. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{aws_cluster=projects/*/locations/*/awsClusters/*}/.well-known/openid-configuration", + }, + ] + request, metadata = self._interceptor.pre_get_aws_open_id_config( + request, metadata + ) + pb_request = aws_service.GetAwsOpenIdConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = aws_resources.AwsOpenIdConfig() + pb_resp = aws_resources.AwsOpenIdConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_aws_open_id_config(resp) + return resp + class _GetAwsServerConfig(AwsClustersRestStub): def __hash__(self): return hash("GetAwsServerConfig") @@ -1599,6 +2006,104 @@ def __call__( resp = self._interceptor.post_list_aws_node_pools(resp) return resp + class _RollbackAwsNodePoolUpdate(AwsClustersRestStub): + def __hash__(self): + return hash("RollbackAwsNodePoolUpdate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: aws_service.RollbackAwsNodePoolUpdateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the rollback aws node pool + update method over HTTP. + + Args: + request (~.aws_service.RollbackAwsNodePoolUpdateRequest): + The request object. Request message for + ``AwsClusters.RollbackAwsNodePoolUpdate`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/awsClusters/*/awsNodePools/*}:rollback", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rollback_aws_node_pool_update( + request, metadata + ) + pb_request = aws_service.RollbackAwsNodePoolUpdateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback_aws_node_pool_update(resp) + return resp + class _UpdateAwsCluster(AwsClustersRestStub): def __hash__(self): return hash("UpdateAwsCluster") @@ -1840,6 +2345,17 @@ def generate_aws_access_token( # In C++ this would require a dynamic_cast return self._GenerateAwsAccessToken(self._session, self._host, self._interceptor) # type: ignore + @property + def generate_aws_cluster_agent_token( + self, + ) -> Callable[ + [aws_service.GenerateAwsClusterAgentTokenRequest], + aws_service.GenerateAwsClusterAgentTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAwsClusterAgentToken(self._session, self._host, self._interceptor) # type: ignore + @property def get_aws_cluster( self, @@ -1848,6 +2364,14 @@ def get_aws_cluster( # In C++ this would require a dynamic_cast return self._GetAwsCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def get_aws_json_web_keys( + self, + ) -> Callable[[aws_service.GetAwsJsonWebKeysRequest], aws_resources.AwsJsonWebKeys]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAwsJsonWebKeys(self._session, self._host, self._interceptor) # type: ignore + @property def get_aws_node_pool( self, @@ -1856,6 +2380,16 @@ def get_aws_node_pool( # In C++ this would require a dynamic_cast return self._GetAwsNodePool(self._session, self._host, self._interceptor) # type: ignore + @property + def get_aws_open_id_config( + self, + ) -> Callable[ + [aws_service.GetAwsOpenIdConfigRequest], aws_resources.AwsOpenIdConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAwsOpenIdConfig(self._session, self._host, self._interceptor) # type: ignore + @property def get_aws_server_config( self, @@ -1886,6 +2420,16 @@ def list_aws_node_pools( # In C++ this would require a dynamic_cast return self._ListAwsNodePools(self._session, self._host, self._interceptor) # type: ignore + @property + def rollback_aws_node_pool_update( + self, + ) -> Callable[ + [aws_service.RollbackAwsNodePoolUpdateRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RollbackAwsNodePoolUpdate(self._session, self._host, self._interceptor) # type: ignore + @property def update_aws_cluster( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py index 6ab52e3a1923..8aadab8e1197 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py @@ -882,7 +882,6 @@ async def sample_create_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAzureClusterRequest( @@ -1049,7 +1048,6 @@ async def sample_update_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAzureClusterRequest( @@ -1090,6 +1088,7 @@ async def sample_update_azure_cluster(): - ``control_plane.vm_size``. - ``annotations``. - ``authorization.admin_users``. + - ``authorization.admin_groups``. - ``control_plane.root_volume.size_gib``. - ``azure_services_authentication``. - ``azure_services_authentication.tenant_id``. 
@@ -1482,7 +1481,7 @@ async def sample_delete_azure_cluster(): Args: request (Optional[Union[google.cloud.gke_multicloud_v1.types.DeleteAzureClusterRequest, dict]]): - The request object. Request message for ``Clusters.DeleteAzureCluster`` + The request object. Request message for ``AzureClusters.DeleteAzureCluster`` method. name (:class:`str`): Required. The resource name the @@ -1572,6 +1571,99 @@ async def sample_delete_azure_cluster(): # Done; return the response. return response + async def generate_azure_cluster_agent_token( + self, + request: Optional[ + Union[azure_service.GenerateAzureClusterAgentTokenRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_service.GenerateAzureClusterAgentTokenResponse: + r"""Generates an access token for a cluster agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_generate_azure_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AzureClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAzureClusterAgentTokenRequest( + azure_cluster="azure_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = await client.generate_azure_cluster_agent_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenRequest, dict]]): + The request object. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenResponse: + + """ + # Create or coerce a protobuf request object. + request = azure_service.GenerateAzureClusterAgentTokenRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_azure_cluster_agent_token, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("azure_cluster", request.azure_cluster),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def generate_azure_access_token( self, request: Optional[ @@ -1736,8 +1828,8 @@ async def sample_create_azure_node_pool(): [AzureCluster][google.cloud.gkemulticloud.v1.AzureCluster] resource where this node pool will be created. - Location names are formatted as - ``projects//locations/``. + ``AzureCluster`` names are formatted as + ``projects//locations//azureClusters/``. See `Resource Names `__ @@ -1913,6 +2005,8 @@ async def sample_update_azure_node_pool(): - ``autoscaling.min_node_count``. - ``autoscaling.max_node_count``. - ``config.ssh_config.authorized_key``. + - ``management.auto_repair``. + - ``management``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2290,8 +2384,8 @@ async def sample_delete_azure_node_pool(): Args: request (Optional[Union[google.cloud.gke_multicloud_v1.types.DeleteAzureNodePoolRequest, dict]]): - The request object. Delete message for ``AzureClusters.DeleteAzureNodePool`` - method. + The request object. Request message for + ``AzureClusters.DeleteAzureNodePool`` method. name (:class:`str`): Required. The resource name the [AzureNodePool][google.cloud.gkemulticloud.v1.AzureNodePool] @@ -2379,6 +2473,250 @@ async def sample_delete_azure_node_pool(): # Done; return the response. 
return response + async def get_azure_open_id_config( + self, + request: Optional[ + Union[azure_service.GetAzureOpenIdConfigRequest, dict] + ] = None, + *, + azure_cluster: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_resources.AzureOpenIdConfig: + r"""Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_get_azure_open_id_config(): + # Create a client + client = gke_multicloud_v1.AzureClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureOpenIdConfigRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = await client.get_azure_open_id_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GetAzureOpenIdConfigRequest, dict]]): + The request object. GetAzureOpenIdConfigRequest gets the + OIDC discovery document for the cluster. + See the OpenID Connect Discovery 1.0 + specification for details. + azure_cluster (:class:`str`): + Required. The AzureCluster, which + owns the OIDC discovery document. + Format: + + projects//locations//azureClusters/ + + This corresponds to the ``azure_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AzureOpenIdConfig: + AzureOpenIdConfig is an OIDC + discovery document for the cluster. See + the OpenID Connect Discovery 1.0 + specification for details. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([azure_cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = azure_service.GetAzureOpenIdConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if azure_cluster is not None: + request.azure_cluster = azure_cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_azure_open_id_config, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("azure_cluster", request.azure_cluster),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_azure_json_web_keys( + self, + request: Optional[Union[azure_service.GetAzureJsonWebKeysRequest, dict]] = None, + *, + azure_cluster: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_resources.AzureJsonWebKeys: + r"""Gets the public component of the cluster signing keys + in JSON Web Key format. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + async def sample_get_azure_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AzureClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureJsonWebKeysRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = await client.get_azure_json_web_keys(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gke_multicloud_v1.types.GetAzureJsonWebKeysRequest, dict]]): + The request object. GetAzureJsonWebKeysRequest gets the public component of + the keys used by the cluster to sign token requests. + This will be the jwks_uri for the discover document + returned by getOpenIDConfig. See the OpenID Connect + Discovery 1.0 specification for details. + azure_cluster (:class:`str`): + Required. The AzureCluster, which + owns the JsonWebKeys. 
Format: + + projects//locations//azureClusters/ + + This corresponds to the ``azure_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AzureJsonWebKeys: + AzureJsonWebKeys is a valid JSON Web + Key Set as specififed in RFC 7517. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([azure_cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = azure_service.GetAzureJsonWebKeysRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if azure_cluster is not None: + request.azure_cluster = azure_cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_azure_json_web_keys, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("azure_cluster", request.azure_cluster),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_azure_server_config( self, request: Optional[ diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py index f5415199daa0..7f9906884da8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py @@ -1146,7 +1146,6 @@ def sample_create_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAzureClusterRequest( @@ -1313,7 +1312,6 @@ def sample_update_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAzureClusterRequest( @@ -1354,6 +1352,7 @@ def sample_update_azure_cluster(): - ``control_plane.vm_size``. - ``annotations``. - ``authorization.admin_users``. + - ``authorization.admin_groups``. - ``control_plane.root_volume.size_gib``. - ``azure_services_authentication``. - ``azure_services_authentication.tenant_id``. 
@@ -1728,7 +1727,7 @@ def sample_delete_azure_cluster(): Args: request (Union[google.cloud.gke_multicloud_v1.types.DeleteAzureClusterRequest, dict]): - The request object. Request message for ``Clusters.DeleteAzureCluster`` + The request object. Request message for ``AzureClusters.DeleteAzureCluster`` method. name (str): Required. The resource name the @@ -1818,6 +1817,93 @@ def sample_delete_azure_cluster(): # Done; return the response. return response + def generate_azure_cluster_agent_token( + self, + request: Optional[ + Union[azure_service.GenerateAzureClusterAgentTokenRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_service.GenerateAzureClusterAgentTokenResponse: + r"""Generates an access token for a cluster agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_generate_azure_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AzureClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAzureClusterAgentTokenRequest( + azure_cluster="azure_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = client.generate_azure_cluster_agent_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenRequest, dict]): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a azure_service.GenerateAzureClusterAgentTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, azure_service.GenerateAzureClusterAgentTokenRequest): + request = azure_service.GenerateAzureClusterAgentTokenRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.generate_azure_cluster_agent_token + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("azure_cluster", request.azure_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def generate_azure_access_token( self, request: Optional[ @@ -1976,8 +2062,8 @@ def sample_create_azure_node_pool(): [AzureCluster][google.cloud.gkemulticloud.v1.AzureCluster] resource where this node pool will be created. - Location names are formatted as - ``projects//locations/``. + ``AzureCluster`` names are formatted as + ``projects//locations//azureClusters/``. See `Resource Names `__ @@ -2153,6 +2239,8 @@ def sample_update_azure_node_pool(): - ``autoscaling.min_node_count``. - ``autoscaling.max_node_count``. - ``config.ssh_config.authorized_key``. + - ``management.auto_repair``. + - ``management``. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2512,8 +2600,8 @@ def sample_delete_azure_node_pool(): Args: request (Union[google.cloud.gke_multicloud_v1.types.DeleteAzureNodePoolRequest, dict]): - The request object. Delete message for ``AzureClusters.DeleteAzureNodePool`` - method. + The request object. Request message for + ``AzureClusters.DeleteAzureNodePool`` method. name (str): Required. The resource name the [AzureNodePool][google.cloud.gkemulticloud.v1.AzureNodePool] @@ -2601,6 +2689,232 @@ def sample_delete_azure_node_pool(): # Done; return the response. return response + def get_azure_open_id_config( + self, + request: Optional[ + Union[azure_service.GetAzureOpenIdConfigRequest, dict] + ] = None, + *, + azure_cluster: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_resources.AzureOpenIdConfig: + r"""Gets the OIDC discovery document for the cluster. 
See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_get_azure_open_id_config(): + # Create a client + client = gke_multicloud_v1.AzureClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureOpenIdConfigRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = client.get_azure_open_id_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GetAzureOpenIdConfigRequest, dict]): + The request object. GetAzureOpenIdConfigRequest gets the + OIDC discovery document for the cluster. + See the OpenID Connect Discovery 1.0 + specification for details. + azure_cluster (str): + Required. The AzureCluster, which + owns the OIDC discovery document. + Format: + + projects//locations//azureClusters/ + + This corresponds to the ``azure_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AzureOpenIdConfig: + AzureOpenIdConfig is an OIDC + discovery document for the cluster. See + the OpenID Connect Discovery 1.0 + specification for details. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([azure_cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a azure_service.GetAzureOpenIdConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, azure_service.GetAzureOpenIdConfigRequest): + request = azure_service.GetAzureOpenIdConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if azure_cluster is not None: + request.azure_cluster = azure_cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_azure_open_id_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("azure_cluster", request.azure_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_azure_json_web_keys( + self, + request: Optional[Union[azure_service.GetAzureJsonWebKeysRequest, dict]] = None, + *, + azure_cluster: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_resources.AzureJsonWebKeys: + r"""Gets the public component of the cluster signing keys + in JSON Web Key format. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gke_multicloud_v1 + + def sample_get_azure_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AzureClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureJsonWebKeysRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = client.get_azure_json_web_keys(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gke_multicloud_v1.types.GetAzureJsonWebKeysRequest, dict]): + The request object. GetAzureJsonWebKeysRequest gets the public component of + the keys used by the cluster to sign token requests. + This will be the jwks_uri for the discover document + returned by getOpenIDConfig. See the OpenID Connect + Discovery 1.0 specification for details. + azure_cluster (str): + Required. The AzureCluster, which + owns the JsonWebKeys. Format: + + projects//locations//azureClusters/ + + This corresponds to the ``azure_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gke_multicloud_v1.types.AzureJsonWebKeys: + AzureJsonWebKeys is a valid JSON Web + Key Set as specififed in RFC 7517. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([azure_cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a azure_service.GetAzureJsonWebKeysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, azure_service.GetAzureJsonWebKeysRequest): + request = azure_service.GetAzureJsonWebKeysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if azure_cluster is not None: + request.azure_cluster = azure_cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_azure_json_web_keys] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("azure_cluster", request.azure_cluster),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def get_azure_server_config( self, request: Optional[ diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/base.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/base.py index b76d868f8052..2910949a8f63 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/base.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/base.py @@ -204,6 +204,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.generate_azure_cluster_agent_token: gapic_v1.method.wrap_method( + self.generate_azure_cluster_agent_token, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.generate_azure_access_token: gapic_v1.method.wrap_method( self.generate_azure_access_token, default_retry=retries.Retry( @@ -261,6 +275,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_azure_open_id_config: gapic_v1.method.wrap_method( + self.get_azure_open_id_config, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_azure_json_web_keys: gapic_v1.method.wrap_method( + self.get_azure_json_web_keys, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_azure_server_config: 
gapic_v1.method.wrap_method( self.get_azure_server_config, default_retry=retries.Retry( @@ -378,6 +420,18 @@ def delete_azure_cluster( ]: raise NotImplementedError() + @property + def generate_azure_cluster_agent_token( + self, + ) -> Callable[ + [azure_service.GenerateAzureClusterAgentTokenRequest], + Union[ + azure_service.GenerateAzureClusterAgentTokenResponse, + Awaitable[azure_service.GenerateAzureClusterAgentTokenResponse], + ], + ]: + raise NotImplementedError() + @property def generate_azure_access_token( self, @@ -438,6 +492,30 @@ def delete_azure_node_pool( ]: raise NotImplementedError() + @property + def get_azure_open_id_config( + self, + ) -> Callable[ + [azure_service.GetAzureOpenIdConfigRequest], + Union[ + azure_resources.AzureOpenIdConfig, + Awaitable[azure_resources.AzureOpenIdConfig], + ], + ]: + raise NotImplementedError() + + @property + def get_azure_json_web_keys( + self, + ) -> Callable[ + [azure_service.GetAzureJsonWebKeysRequest], + Union[ + azure_resources.AzureJsonWebKeys, + Awaitable[azure_resources.AzureJsonWebKeys], + ], + ]: + raise NotImplementedError() + @property def get_azure_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc.py index a4f9b8148021..8f1751f9192b 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc.py @@ -529,6 +529,38 @@ def delete_azure_cluster( ) return self._stubs["delete_azure_cluster"] + @property + def generate_azure_cluster_agent_token( + self, + ) -> Callable[ + [azure_service.GenerateAzureClusterAgentTokenRequest], + azure_service.GenerateAzureClusterAgentTokenResponse, + ]: + r"""Return a callable for the generate azure 
cluster agent + token method over gRPC. + + Generates an access token for a cluster agent. + + Returns: + Callable[[~.GenerateAzureClusterAgentTokenRequest], + ~.GenerateAzureClusterAgentTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_azure_cluster_agent_token" not in self._stubs: + self._stubs[ + "generate_azure_cluster_agent_token" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AzureClusters/GenerateAzureClusterAgentToken", + request_serializer=azure_service.GenerateAzureClusterAgentTokenRequest.serialize, + response_deserializer=azure_service.GenerateAzureClusterAgentTokenResponse.deserialize, + ) + return self._stubs["generate_azure_cluster_agent_token"] + @property def generate_azure_access_token( self, @@ -714,6 +746,66 @@ def delete_azure_node_pool( ) return self._stubs["delete_azure_node_pool"] + @property + def get_azure_open_id_config( + self, + ) -> Callable[ + [azure_service.GetAzureOpenIdConfigRequest], azure_resources.AzureOpenIdConfig + ]: + r"""Return a callable for the get azure open id config method over gRPC. + + Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + Returns: + Callable[[~.GetAzureOpenIdConfigRequest], + ~.AzureOpenIdConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_azure_open_id_config" not in self._stubs: + self._stubs["get_azure_open_id_config"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AzureClusters/GetAzureOpenIdConfig", + request_serializer=azure_service.GetAzureOpenIdConfigRequest.serialize, + response_deserializer=azure_resources.AzureOpenIdConfig.deserialize, + ) + return self._stubs["get_azure_open_id_config"] + + @property + def get_azure_json_web_keys( + self, + ) -> Callable[ + [azure_service.GetAzureJsonWebKeysRequest], azure_resources.AzureJsonWebKeys + ]: + r"""Return a callable for the get azure json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. + + Returns: + Callable[[~.GetAzureJsonWebKeysRequest], + ~.AzureJsonWebKeys]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_azure_json_web_keys" not in self._stubs: + self._stubs["get_azure_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AzureClusters/GetAzureJsonWebKeys", + request_serializer=azure_service.GetAzureJsonWebKeysRequest.serialize, + response_deserializer=azure_resources.AzureJsonWebKeys.deserialize, + ) + return self._stubs["get_azure_json_web_keys"] + @property def get_azure_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc_asyncio.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc_asyncio.py index 99267bf31db1..05c98366a86b 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc_asyncio.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/grpc_asyncio.py @@ -549,6 +549,38 @@ def delete_azure_cluster( ) return self._stubs["delete_azure_cluster"] + @property + def generate_azure_cluster_agent_token( + self, + ) -> Callable[ + [azure_service.GenerateAzureClusterAgentTokenRequest], + Awaitable[azure_service.GenerateAzureClusterAgentTokenResponse], + ]: + r"""Return a callable for the generate azure cluster agent + token method over gRPC. + + Generates an access token for a cluster agent. + + Returns: + Callable[[~.GenerateAzureClusterAgentTokenRequest], + Awaitable[~.GenerateAzureClusterAgentTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "generate_azure_cluster_agent_token" not in self._stubs: + self._stubs[ + "generate_azure_cluster_agent_token" + ] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AzureClusters/GenerateAzureClusterAgentToken", + request_serializer=azure_service.GenerateAzureClusterAgentTokenRequest.serialize, + response_deserializer=azure_service.GenerateAzureClusterAgentTokenResponse.deserialize, + ) + return self._stubs["generate_azure_cluster_agent_token"] + @property def generate_azure_access_token( self, @@ -741,6 +773,68 @@ def delete_azure_node_pool( ) return self._stubs["delete_azure_node_pool"] + @property + def get_azure_open_id_config( + self, + ) -> Callable[ + [azure_service.GetAzureOpenIdConfigRequest], + Awaitable[azure_resources.AzureOpenIdConfig], + ]: + r"""Return a callable for the get azure open id config method over gRPC. + + Gets the OIDC discovery document for the cluster. See the + `OpenID Connect Discovery 1.0 + specification `__ + for details. + + Returns: + Callable[[~.GetAzureOpenIdConfigRequest], + Awaitable[~.AzureOpenIdConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_azure_open_id_config" not in self._stubs: + self._stubs["get_azure_open_id_config"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AzureClusters/GetAzureOpenIdConfig", + request_serializer=azure_service.GetAzureOpenIdConfigRequest.serialize, + response_deserializer=azure_resources.AzureOpenIdConfig.deserialize, + ) + return self._stubs["get_azure_open_id_config"] + + @property + def get_azure_json_web_keys( + self, + ) -> Callable[ + [azure_service.GetAzureJsonWebKeysRequest], + Awaitable[azure_resources.AzureJsonWebKeys], + ]: + r"""Return a callable for the get azure json web keys method over gRPC. 
+ + Gets the public component of the cluster signing keys + in JSON Web Key format. + + Returns: + Callable[[~.GetAzureJsonWebKeysRequest], + Awaitable[~.AzureJsonWebKeys]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_azure_json_web_keys" not in self._stubs: + self._stubs["get_azure_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.cloud.gkemulticloud.v1.AzureClusters/GetAzureJsonWebKeys", + request_serializer=azure_service.GetAzureJsonWebKeysRequest.serialize, + response_deserializer=azure_resources.AzureJsonWebKeys.deserialize, + ) + return self._stubs["get_azure_json_web_keys"] + @property def get_azure_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py index 6aa924f0080f..eb263eb1d97e 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py @@ -127,6 +127,14 @@ def post_generate_azure_access_token(self, response): logging.log(f"Received response: {response}") return response + def pre_generate_azure_cluster_agent_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_azure_cluster_agent_token(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_azure_client(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -143,6 +151,14 @@ def post_get_azure_cluster(self, response): 
logging.log(f"Received response: {response}") return response + def pre_get_azure_json_web_keys(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_azure_json_web_keys(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_azure_node_pool(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -151,6 +167,14 @@ def post_get_azure_node_pool(self, response): logging.log(f"Received response: {response}") return response + def pre_get_azure_open_id_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_azure_open_id_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_azure_server_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -368,6 +392,31 @@ def post_generate_azure_access_token( """ return response + def pre_generate_azure_cluster_agent_token( + self, + request: azure_service.GenerateAzureClusterAgentTokenRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + azure_service.GenerateAzureClusterAgentTokenRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for generate_azure_cluster_agent_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the AzureClusters server. + """ + return request, metadata + + def post_generate_azure_cluster_agent_token( + self, response: azure_service.GenerateAzureClusterAgentTokenResponse + ) -> azure_service.GenerateAzureClusterAgentTokenResponse: + """Post-rpc interceptor for generate_azure_cluster_agent_token + + Override in a subclass to manipulate the response + after it is returned by the AzureClusters server but before + it is returned to user code. 
+ """ + return response + def pre_get_azure_client( self, request: azure_service.GetAzureClientRequest, @@ -414,6 +463,29 @@ def post_get_azure_cluster( """ return response + def pre_get_azure_json_web_keys( + self, + request: azure_service.GetAzureJsonWebKeysRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[azure_service.GetAzureJsonWebKeysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_azure_json_web_keys + + Override in a subclass to manipulate the request or metadata + before they are sent to the AzureClusters server. + """ + return request, metadata + + def post_get_azure_json_web_keys( + self, response: azure_resources.AzureJsonWebKeys + ) -> azure_resources.AzureJsonWebKeys: + """Post-rpc interceptor for get_azure_json_web_keys + + Override in a subclass to manipulate the response + after it is returned by the AzureClusters server but before + it is returned to user code. + """ + return response + def pre_get_azure_node_pool( self, request: azure_service.GetAzureNodePoolRequest, @@ -437,6 +509,29 @@ def post_get_azure_node_pool( """ return response + def pre_get_azure_open_id_config( + self, + request: azure_service.GetAzureOpenIdConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[azure_service.GetAzureOpenIdConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_azure_open_id_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the AzureClusters server. + """ + return request, metadata + + def post_get_azure_open_id_config( + self, response: azure_resources.AzureOpenIdConfig + ) -> azure_resources.AzureOpenIdConfig: + """Post-rpc interceptor for get_azure_open_id_config + + Override in a subclass to manipulate the response + after it is returned by the AzureClusters server but before + it is returned to user code. 
+ """ + return response + def pre_get_azure_server_config( self, request: azure_service.GetAzureServerConfigRequest, @@ -1234,7 +1329,7 @@ def __call__( Args: request (~.azure_service.DeleteAzureClusterRequest): - The request object. Request message for ``Clusters.DeleteAzureCluster`` + The request object. Request message for ``AzureClusters.DeleteAzureCluster`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1322,8 +1417,8 @@ def __call__( Args: request (~.azure_service.DeleteAzureNodePoolRequest): - The request object. Delete message for ``AzureClusters.DeleteAzureNodePool`` - method. + The request object. Request message for + ``AzureClusters.DeleteAzureNodePool`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1474,6 +1569,105 @@ def __call__( resp = self._interceptor.post_generate_azure_access_token(resp) return resp + class _GenerateAzureClusterAgentToken(AzureClustersRestStub): + def __hash__(self): + return hash("GenerateAzureClusterAgentToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: azure_service.GenerateAzureClusterAgentTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_service.GenerateAzureClusterAgentTokenResponse: + r"""Call the generate azure cluster + agent token method over HTTP. + + Args: + request (~.azure_service.GenerateAzureClusterAgentTokenRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.azure_service.GenerateAzureClusterAgentTokenResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{azure_cluster=projects/*/locations/*/azureClusters/*}:generateAzureClusterAgentToken", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_generate_azure_cluster_agent_token( + request, metadata + ) + pb_request = azure_service.GenerateAzureClusterAgentTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = azure_service.GenerateAzureClusterAgentTokenResponse() + pb_resp = azure_service.GenerateAzureClusterAgentTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_azure_cluster_agent_token(resp) + return resp + class _GetAzureClient(AzureClustersRestStub): def __hash__(self): return hash("GetAzureClient") @@ -1664,6 +1858,98 @@ def __call__( resp = self._interceptor.post_get_azure_cluster(resp) return resp + class _GetAzureJsonWebKeys(AzureClustersRestStub): + def __hash__(self): + return hash("GetAzureJsonWebKeys") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: azure_service.GetAzureJsonWebKeysRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_resources.AzureJsonWebKeys: + r"""Call the get azure json web keys method over HTTP. + + Args: + request (~.azure_service.GetAzureJsonWebKeysRequest): + The request object. GetAzureJsonWebKeysRequest gets the public component of + the keys used by the cluster to sign token requests. + This will be the jwks_uri for the discover document + returned by getOpenIDConfig. See the OpenID Connect + Discovery 1.0 specification for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.azure_resources.AzureJsonWebKeys: + AzureJsonWebKeys is a valid JSON Web + Key Set as specififed in RFC 7517. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{azure_cluster=projects/*/locations/*/azureClusters/*}/jwks", + }, + ] + request, metadata = self._interceptor.pre_get_azure_json_web_keys( + request, metadata + ) + pb_request = azure_service.GetAzureJsonWebKeysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = azure_resources.AzureJsonWebKeys() + pb_resp = azure_resources.AzureJsonWebKeys.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_azure_json_web_keys(resp) + return resp + class _GetAzureNodePool(AzureClustersRestStub): def __hash__(self): return hash("GetAzureNodePool") @@ -1751,6 +2037,99 @@ def __call__( resp = self._interceptor.post_get_azure_node_pool(resp) return resp + class _GetAzureOpenIdConfig(AzureClustersRestStub): + def __hash__(self): + return hash("GetAzureOpenIdConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: azure_service.GetAzureOpenIdConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> azure_resources.AzureOpenIdConfig: + r"""Call the get azure open id config method over HTTP. + + Args: + request (~.azure_service.GetAzureOpenIdConfigRequest): + The request object. GetAzureOpenIdConfigRequest gets the + OIDC discovery document for the cluster. + See the OpenID Connect Discovery 1.0 + specification for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.azure_resources.AzureOpenIdConfig: + AzureOpenIdConfig is an OIDC + discovery document for the cluster. See + the OpenID Connect Discovery 1.0 + specification for details. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{azure_cluster=projects/*/locations/*/azureClusters/*}/.well-known/openid-configuration", + }, + ] + request, metadata = self._interceptor.pre_get_azure_open_id_config( + request, metadata + ) + pb_request = azure_service.GetAzureOpenIdConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = azure_resources.AzureOpenIdConfig() + pb_resp = azure_resources.AzureOpenIdConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_azure_open_id_config(resp) + return resp + class _GetAzureServerConfig(AzureClustersRestStub): def __hash__(self): return hash("GetAzureServerConfig") @@ -2366,6 +2745,17 @@ def generate_azure_access_token( # In C++ this would require a dynamic_cast return self._GenerateAzureAccessToken(self._session, self._host, self._interceptor) # type: ignore + @property + def generate_azure_cluster_agent_token( + self, + ) -> Callable[ + [azure_service.GenerateAzureClusterAgentTokenRequest], + azure_service.GenerateAzureClusterAgentTokenResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAzureClusterAgentToken(self._session, self._host, self._interceptor) # type: ignore + @property def get_azure_client( self, @@ -2382,6 +2772,16 @@ def get_azure_cluster( # In C++ this would require a dynamic_cast return self._GetAzureCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def get_azure_json_web_keys( + self, + ) -> Callable[ + [azure_service.GetAzureJsonWebKeysRequest], azure_resources.AzureJsonWebKeys + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAzureJsonWebKeys(self._session, self._host, self._interceptor) # type: ignore + @property def get_azure_node_pool( self, @@ -2392,6 +2792,16 @@ def get_azure_node_pool( # In C++ this would require a dynamic_cast return self._GetAzureNodePool(self._session, self._host, self._interceptor) # type: ignore + @property + def get_azure_open_id_config( + self, + ) -> Callable[ + [azure_service.GetAzureOpenIdConfigRequest], azure_resources.AzureOpenIdConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAzureOpenIdConfig(self._session, self._host, self._interceptor) # type: ignore + @property def get_azure_server_config( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py index a20365dd08b6..f02d31e4e146 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py @@ -16,15 +16,20 @@ from .attached_resources import ( AttachedCluster, AttachedClusterError, + AttachedClusterGroup, AttachedClustersAuthorization, AttachedClusterUser, AttachedOidcConfig, AttachedPlatformVersionInfo, + AttachedProxyConfig, AttachedServerConfig, + KubernetesSecret, ) from .attached_service import ( CreateAttachedClusterRequest, DeleteAttachedClusterRequest, + GenerateAttachedClusterAgentTokenRequest, + GenerateAttachedClusterAgentTokenResponse, GenerateAttachedClusterInstallManifestRequest, GenerateAttachedClusterInstallManifestResponse, GetAttachedClusterRequest, @@ -39,22 +44,29 @@ AwsAutoscalingGroupMetricsCollection, AwsCluster, AwsClusterError, + AwsClusterGroup, AwsClusterNetworking, AwsClusterUser, AwsConfigEncryption, AwsControlPlane, 
AwsDatabaseEncryption, AwsInstancePlacement, + AwsJsonWebKeys, AwsK8sVersionInfo, AwsNodeConfig, + AwsNodeManagement, AwsNodePool, AwsNodePoolAutoscaling, AwsNodePoolError, + AwsOpenIdConfig, AwsProxyConfig, AwsServerConfig, AwsServicesAuthentication, AwsSshConfig, AwsVolumeTemplate, + SpotConfig, + SurgeSettings, + UpdateSettings, ) from .aws_service import ( CreateAwsClusterRequest, @@ -63,13 +75,18 @@ DeleteAwsNodePoolRequest, GenerateAwsAccessTokenRequest, GenerateAwsAccessTokenResponse, + GenerateAwsClusterAgentTokenRequest, + GenerateAwsClusterAgentTokenResponse, GetAwsClusterRequest, + GetAwsJsonWebKeysRequest, GetAwsNodePoolRequest, + GetAwsOpenIdConfigRequest, GetAwsServerConfigRequest, ListAwsClustersRequest, ListAwsClustersResponse, ListAwsNodePoolsRequest, ListAwsNodePoolsResponse, + RollbackAwsNodePoolUpdateRequest, UpdateAwsClusterRequest, UpdateAwsNodePoolRequest, ) @@ -78,6 +95,7 @@ AzureClient, AzureCluster, AzureClusterError, + AzureClusterGroup, AzureClusterNetworking, AzureClusterResources, AzureClusterUser, @@ -85,11 +103,14 @@ AzureControlPlane, AzureDatabaseEncryption, AzureDiskTemplate, + AzureJsonWebKeys, AzureK8sVersionInfo, AzureNodeConfig, + AzureNodeManagement, AzureNodePool, AzureNodePoolAutoscaling, AzureNodePoolError, + AzureOpenIdConfig, AzureProxyConfig, AzureServerConfig, AzureServicesAuthentication, @@ -105,9 +126,13 @@ DeleteAzureNodePoolRequest, GenerateAzureAccessTokenRequest, GenerateAzureAccessTokenResponse, + GenerateAzureClusterAgentTokenRequest, + GenerateAzureClusterAgentTokenResponse, GetAzureClientRequest, GetAzureClusterRequest, + GetAzureJsonWebKeysRequest, GetAzureNodePoolRequest, + GetAzureOpenIdConfigRequest, GetAzureServerConfigRequest, ListAzureClientsRequest, ListAzureClientsResponse, @@ -119,7 +144,9 @@ UpdateAzureNodePoolRequest, ) from .common_resources import ( + BinaryAuthorization, Fleet, + Jwk, LoggingComponentConfig, LoggingConfig, ManagedPrometheusConfig, @@ -133,13 +160,18 @@ __all__ = ( 
"AttachedCluster", "AttachedClusterError", + "AttachedClusterGroup", "AttachedClustersAuthorization", "AttachedClusterUser", "AttachedOidcConfig", "AttachedPlatformVersionInfo", + "AttachedProxyConfig", "AttachedServerConfig", + "KubernetesSecret", "CreateAttachedClusterRequest", "DeleteAttachedClusterRequest", + "GenerateAttachedClusterAgentTokenRequest", + "GenerateAttachedClusterAgentTokenResponse", "GenerateAttachedClusterInstallManifestRequest", "GenerateAttachedClusterInstallManifestResponse", "GetAttachedClusterRequest", @@ -152,41 +184,54 @@ "AwsAutoscalingGroupMetricsCollection", "AwsCluster", "AwsClusterError", + "AwsClusterGroup", "AwsClusterNetworking", "AwsClusterUser", "AwsConfigEncryption", "AwsControlPlane", "AwsDatabaseEncryption", "AwsInstancePlacement", + "AwsJsonWebKeys", "AwsK8sVersionInfo", "AwsNodeConfig", + "AwsNodeManagement", "AwsNodePool", "AwsNodePoolAutoscaling", "AwsNodePoolError", + "AwsOpenIdConfig", "AwsProxyConfig", "AwsServerConfig", "AwsServicesAuthentication", "AwsSshConfig", "AwsVolumeTemplate", + "SpotConfig", + "SurgeSettings", + "UpdateSettings", "CreateAwsClusterRequest", "CreateAwsNodePoolRequest", "DeleteAwsClusterRequest", "DeleteAwsNodePoolRequest", "GenerateAwsAccessTokenRequest", "GenerateAwsAccessTokenResponse", + "GenerateAwsClusterAgentTokenRequest", + "GenerateAwsClusterAgentTokenResponse", "GetAwsClusterRequest", + "GetAwsJsonWebKeysRequest", "GetAwsNodePoolRequest", + "GetAwsOpenIdConfigRequest", "GetAwsServerConfigRequest", "ListAwsClustersRequest", "ListAwsClustersResponse", "ListAwsNodePoolsRequest", "ListAwsNodePoolsResponse", + "RollbackAwsNodePoolUpdateRequest", "UpdateAwsClusterRequest", "UpdateAwsNodePoolRequest", "AzureAuthorization", "AzureClient", "AzureCluster", "AzureClusterError", + "AzureClusterGroup", "AzureClusterNetworking", "AzureClusterResources", "AzureClusterUser", @@ -194,11 +239,14 @@ "AzureControlPlane", "AzureDatabaseEncryption", "AzureDiskTemplate", + "AzureJsonWebKeys", 
"AzureK8sVersionInfo", "AzureNodeConfig", + "AzureNodeManagement", "AzureNodePool", "AzureNodePoolAutoscaling", "AzureNodePoolError", + "AzureOpenIdConfig", "AzureProxyConfig", "AzureServerConfig", "AzureServicesAuthentication", @@ -212,9 +260,13 @@ "DeleteAzureNodePoolRequest", "GenerateAzureAccessTokenRequest", "GenerateAzureAccessTokenResponse", + "GenerateAzureClusterAgentTokenRequest", + "GenerateAzureClusterAgentTokenResponse", "GetAzureClientRequest", "GetAzureClusterRequest", + "GetAzureJsonWebKeysRequest", "GetAzureNodePoolRequest", + "GetAzureOpenIdConfigRequest", "GetAzureServerConfigRequest", "ListAzureClientsRequest", "ListAzureClientsResponse", @@ -224,7 +276,9 @@ "ListAzureNodePoolsResponse", "UpdateAzureClusterRequest", "UpdateAzureNodePoolRequest", + "BinaryAuthorization", "Fleet", + "Jwk", "LoggingComponentConfig", "LoggingConfig", "ManagedPrometheusConfig", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py index 3c5ae607b8eb..b52737038b23 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py @@ -28,10 +28,13 @@ "AttachedCluster", "AttachedClustersAuthorization", "AttachedClusterUser", + "AttachedClusterGroup", "AttachedOidcConfig", "AttachedServerConfig", "AttachedPlatformVersionInfo", "AttachedClusterError", + "AttachedProxyConfig", + "KubernetesSecret", }, ) @@ -67,7 +70,7 @@ class AttachedCluster(proto.Message): Required. The Kubernetes distribution of the underlying attached cluster. - Supported values: ["eks", "aks"]. + Supported values: ["eks", "aks", "generic"]. cluster_region (str): Output only. The region where this cluster runs. 
@@ -125,6 +128,12 @@ class AttachedCluster(proto.Message): monitoring_config (google.cloud.gke_multicloud_v1.types.MonitoringConfig): Optional. Monitoring configuration for this cluster. + proxy_config (google.cloud.gke_multicloud_v1.types.AttachedProxyConfig): + Optional. Proxy configuration for outbound + HTTP(S) traffic. + binary_authorization (google.cloud.gke_multicloud_v1.types.BinaryAuthorization): + Optional. Binary Authorization configuration + for this cluster. """ class State(proto.Enum): @@ -253,6 +262,16 @@ class State(proto.Enum): number=23, message=common_resources.MonitoringConfig, ) + proxy_config: "AttachedProxyConfig" = proto.Field( + proto.MESSAGE, + number=24, + message="AttachedProxyConfig", + ) + binary_authorization: common_resources.BinaryAuthorization = proto.Field( + proto.MESSAGE, + number=25, + message=common_resources.BinaryAuthorization, + ) class AttachedClustersAuthorization(proto.Message): @@ -260,11 +279,19 @@ class AttachedClustersAuthorization(proto.Message): Attributes: admin_users (MutableSequence[google.cloud.gke_multicloud_v1.types.AttachedClusterUser]): - Required. Users that can perform operations as a cluster + Optional. Users that can perform operations as a cluster admin. A managed ClusterRoleBinding will be created to grant the ``cluster-admin`` ClusterRole to the users. Up to ten admin users can be provided. + For more info on RBAC, see + https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles + admin_groups (MutableSequence[google.cloud.gke_multicloud_v1.types.AttachedClusterGroup]): + Optional. Groups of users that can perform operations as a + cluster admin. A managed ClusterRoleBinding will be created + to grant the ``cluster-admin`` ClusterRole to the groups. Up + to ten admin groups can be provided. 
+ For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles """ @@ -274,6 +301,11 @@ class AttachedClustersAuthorization(proto.Message): number=1, message="AttachedClusterUser", ) + admin_groups: MutableSequence["AttachedClusterGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AttachedClusterGroup", + ) class AttachedClusterUser(proto.Message): @@ -291,6 +323,21 @@ class AttachedClusterUser(proto.Message): ) +class AttachedClusterGroup(proto.Message): + r"""Identities of a group-type subject for Attached clusters. + + Attributes: + group (str): + Required. The name of the group, e.g. + ``my-group@domain.com``. + """ + + group: str = proto.Field( + proto.STRING, + number=1, + ) + + class AttachedOidcConfig(proto.Message): r"""OIDC discovery information of the target cluster. @@ -385,4 +432,43 @@ class AttachedClusterError(proto.Message): ) +class AttachedProxyConfig(proto.Message): + r"""Details of a proxy config. + + Attributes: + kubernetes_secret (google.cloud.gke_multicloud_v1.types.KubernetesSecret): + The Kubernetes Secret resource that contains + the HTTP(S) proxy configuration. The secret must + be a JSON encoded proxy configuration as + described in + """ + + kubernetes_secret: "KubernetesSecret" = proto.Field( + proto.MESSAGE, + number=1, + message="KubernetesSecret", + ) + + +class KubernetesSecret(proto.Message): + r"""Information about a Kubernetes Secret + + Attributes: + name (str): + Name of the kubernetes secret. + namespace (str): + Namespace in which the kubernetes secret is + stored. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + namespace: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py index eddb86cb7d5a..d50508193b54 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py @@ -35,6 +35,8 @@ "ListAttachedClustersResponse", "DeleteAttachedClusterRequest", "GetAttachedServerConfigRequest", + "GenerateAttachedClusterAgentTokenRequest", + "GenerateAttachedClusterAgentTokenResponse", }, ) @@ -80,6 +82,9 @@ class GenerateAttachedClusterInstallManifestRequest(proto.Message): You can list all supported versions on a given Google Cloud region by calling [GetAttachedServerConfig][google.cloud.gkemulticloud.v1.AttachedClusters.GetAttachedServerConfig]. + proxy_config (google.cloud.gke_multicloud_v1.types.AttachedProxyConfig): + Optional. Proxy configuration for outbound + HTTP(S) traffic. """ parent: str = proto.Field( @@ -94,6 +99,11 @@ class GenerateAttachedClusterInstallManifestRequest(proto.Message): proto.STRING, number=3, ) + proxy_config: attached_resources.AttachedProxyConfig = proto.Field( + proto.MESSAGE, + number=4, + message=attached_resources.AttachedProxyConfig, + ) class GenerateAttachedClusterInstallManifestResponse(proto.Message): @@ -202,6 +212,9 @@ class ImportAttachedClusterRequest(proto.Message): attached cluster. Supported values: ["eks", "aks"]. + proxy_config (google.cloud.gke_multicloud_v1.types.AttachedProxyConfig): + Optional. Proxy configuration for outbound + HTTP(S) traffic. 
""" parent: str = proto.Field( @@ -224,6 +237,11 @@ class ImportAttachedClusterRequest(proto.Message): proto.STRING, number=5, ) + proxy_config: attached_resources.AttachedProxyConfig = proto.Field( + proto.MESSAGE, + number=6, + message=attached_resources.AttachedProxyConfig, + ) class UpdateAttachedClusterRequest(proto.Message): @@ -244,12 +262,16 @@ class UpdateAttachedClusterRequest(proto.Message): paths field can only include these fields from [AttachedCluster][google.cloud.gkemulticloud.v1.AttachedCluster]: - - ``description``. - ``annotations``. - - ``platform_version``. + - ``authorization.admin_groups``. - ``authorization.admin_users``. + - ``binary_authorization.evaluation_mode``. + - ``description``. - ``logging_config.component_config.enable_components``. - ``monitoring_config.managed_prometheus_config.enabled``. + - ``platform_version``. + - ``proxy_config.kubernetes_secret.name``. + - ``proxy_config.kubernetes_secret.namespace``. """ attached_cluster: attached_resources.AttachedCluster = proto.Field( @@ -460,4 +482,92 @@ class GetAttachedServerConfigRequest(proto.Message): ) +class GenerateAttachedClusterAgentTokenRequest(proto.Message): + r""" + + Attributes: + attached_cluster (str): + Required. + subject_token (str): + Required. + subject_token_type (str): + Required. + version (str): + Required. + grant_type (str): + Optional. + audience (str): + Optional. + scope (str): + Optional. + requested_token_type (str): + Optional. + options (str): + Optional. 
+ """ + + attached_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + subject_token: str = proto.Field( + proto.STRING, + number=2, + ) + subject_token_type: str = proto.Field( + proto.STRING, + number=3, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + grant_type: str = proto.Field( + proto.STRING, + number=6, + ) + audience: str = proto.Field( + proto.STRING, + number=7, + ) + scope: str = proto.Field( + proto.STRING, + number=8, + ) + requested_token_type: str = proto.Field( + proto.STRING, + number=9, + ) + options: str = proto.Field( + proto.STRING, + number=10, + ) + + +class GenerateAttachedClusterAgentTokenResponse(proto.Message): + r""" + + Attributes: + access_token (str): + + expires_in (int): + + token_type (str): + + """ + + access_token: str = proto.Field( + proto.STRING, + number=1, + ) + expires_in: int = proto.Field( + proto.INT32, + number=2, + ) + token_type: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py index 7165973d01bb..619a76f0533e 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore import proto # type: ignore from google.cloud.gke_multicloud_v1.types import common_resources @@ -30,12 +31,18 @@ "AwsServicesAuthentication", "AwsAuthorization", "AwsClusterUser", + "AwsClusterGroup", "AwsDatabaseEncryption", "AwsVolumeTemplate", "AwsClusterNetworking", "AwsNodePool", + "UpdateSettings", + "SurgeSettings", + "AwsNodeManagement", "AwsNodeConfig", 
"AwsNodePoolAutoscaling", + "AwsOpenIdConfig", + "AwsJsonWebKeys", "AwsServerConfig", "AwsK8sVersionInfo", "AwsSshConfig", @@ -43,6 +50,7 @@ "AwsConfigEncryption", "AwsInstancePlacement", "AwsAutoscalingGroupMetricsCollection", + "SpotConfig", "AwsClusterError", "AwsNodePoolError", }, @@ -135,6 +143,9 @@ class AwsCluster(proto.Message): monitoring_config (google.cloud.gke_multicloud_v1.types.MonitoringConfig): Optional. Monitoring configuration for this cluster. + binary_authorization (google.cloud.gke_multicloud_v1.types.BinaryAuthorization): + Optional. Binary Authorization configuration + for this cluster. """ class State(proto.Enum): @@ -264,6 +275,11 @@ class State(proto.Enum): number=21, message=common_resources.MonitoringConfig, ) + binary_authorization: common_resources.BinaryAuthorization = proto.Field( + proto.MESSAGE, + number=22, + message=common_resources.BinaryAuthorization, + ) class AwsControlPlane(proto.Message): @@ -447,11 +463,19 @@ class AwsAuthorization(proto.Message): Attributes: admin_users (MutableSequence[google.cloud.gke_multicloud_v1.types.AwsClusterUser]): - Required. Users that can perform operations as a cluster + Optional. Users that can perform operations as a cluster admin. A managed ClusterRoleBinding will be created to grant the ``cluster-admin`` ClusterRole to the users. Up to ten admin users can be provided. + For more info on RBAC, see + https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles + admin_groups (MutableSequence[google.cloud.gke_multicloud_v1.types.AwsClusterGroup]): + Optional. Groups of users that can perform operations as a + cluster admin. A managed ClusterRoleBinding will be created + to grant the ``cluster-admin`` ClusterRole to the groups. Up + to ten admin groups can be provided. 
+ For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles """ @@ -461,6 +485,11 @@ class AwsAuthorization(proto.Message): number=1, message="AwsClusterUser", ) + admin_groups: MutableSequence["AwsClusterGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AwsClusterGroup", + ) class AwsClusterUser(proto.Message): @@ -478,6 +507,21 @@ class AwsClusterUser(proto.Message): ) +class AwsClusterGroup(proto.Message): + r"""Identities of a group-type subject for AWS clusters. + + Attributes: + group (str): + Required. The name of the group, e.g. + ``my-group@domain.com``. + """ + + group: str = proto.Field( + proto.STRING, + number=1, + ) + + class AwsDatabaseEncryption(proto.Message): r"""Configuration related to application-layer secrets encryption. @@ -511,6 +555,12 @@ class AwsVolumeTemplate(proto.Message): iops (int): Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume. + throughput (int): + Optional. The throughput that the volume supports, in MiB/s. + Only valid if volume_type is GP3. + + If the volume_type is GP3 and this is not speficied, it + defaults to 125. kms_key_arn (str): Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt @@ -553,6 +603,10 @@ class VolumeType(proto.Enum): proto.INT32, number=3, ) + throughput: int = proto.Field( + proto.INT32, + number=5, + ) kms_key_arn: str = proto.Field( proto.STRING, number=4, @@ -582,6 +636,14 @@ class AwsClusterNetworking(proto.Message): assigned an IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation. + per_node_pool_sg_rules_disabled (bool): + Optional. Disable the per node pool subnet + security group rules on the control plane + security group. When set to true, you must also + provide one or more security groups that ensure + node pools are able to send requests to the + control plane on TCP/443 and TCP/8132. 
Failure + to do so may result in unavailable node pools. """ vpc_id: str = proto.Field( @@ -596,6 +658,10 @@ class AwsClusterNetworking(proto.Message): proto.STRING, number=3, ) + per_node_pool_sg_rules_disabled: bool = proto.Field( + proto.BOOL, + number=5, + ) class AwsNodePool(proto.Message): @@ -666,6 +732,12 @@ class AwsNodePool(proto.Message): errors (MutableSequence[google.cloud.gke_multicloud_v1.types.AwsNodePoolError]): Output only. A set of errors found in the node pool. + management (google.cloud.gke_multicloud_v1.types.AwsNodeManagement): + Optional. The Management configuration for + this node pool. + update_settings (google.cloud.gke_multicloud_v1.types.UpdateSettings): + Optional. Update settings control the speed + and disruption of the update. """ class State(proto.Enum): @@ -766,6 +838,100 @@ class State(proto.Enum): number=29, message="AwsNodePoolError", ) + management: "AwsNodeManagement" = proto.Field( + proto.MESSAGE, + number=30, + message="AwsNodeManagement", + ) + update_settings: "UpdateSettings" = proto.Field( + proto.MESSAGE, + number=32, + message="UpdateSettings", + ) + + +class UpdateSettings(proto.Message): + r"""UpdateSettings control the level of parallelism and the level of + disruption caused during the update of a node pool. + + These settings are applicable when the node pool update requires + replacing the existing node pool nodes with the updated ones. + + UpdateSettings are optional. When UpdateSettings are not specified + during the node pool creation, a default is chosen based on the + parent cluster's version. For clusters with minor version 1.27 and + later, a default surge_settings configuration with max_surge = 1 and + max_unavailable = 0 is used. For clusters with older versions, node + pool updates use the traditional rolling update mechanism of + updating one node at a time in a "terminate before create" fashion + and update_settings is not applicable. 
+ + Set the surge_settings parameter to use the Surge Update mechanism + for the rolling update of node pool nodes. + + 1. max_surge controls the number of additional nodes that can be + created beyond the current size of the node pool temporarily for + the time of the update to increase the number of available nodes. + 2. max_unavailable controls the number of nodes that can be + simultaneously unavailable during the update. + 3. (max_surge + max_unavailable) determines the level of parallelism + (i.e., the number of nodes being updated at the same time). + + Attributes: + surge_settings (google.cloud.gke_multicloud_v1.types.SurgeSettings): + Optional. Settings for surge update. + """ + + surge_settings: "SurgeSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="SurgeSettings", + ) + + +class SurgeSettings(proto.Message): + r"""SurgeSettings contains the parameters for Surge update. + + Attributes: + max_surge (int): + Optional. The maximum number of nodes that + can be created beyond the current size of the + node pool during the update process. + max_unavailable (int): + Optional. The maximum number of nodes that + can be simultaneously unavailable during the + update process. A node is considered unavailable + if its status is not Ready. + """ + + max_surge: int = proto.Field( + proto.INT32, + number=1, + ) + max_unavailable: int = proto.Field( + proto.INT32, + number=2, + ) + + +class AwsNodeManagement(proto.Message): + r"""AwsNodeManagement defines the set of node management features + turned on for an AWS node pool. + + Attributes: + auto_repair (bool): + Optional. Whether or not the nodes will be + automatically repaired. When set to true, the + nodes in this node pool will be monitored and if + they fail health checks consistently over a + period of time, an automatic repair action will + be triggered to replace them with new nodes. 
+ """ + + auto_repair: bool = proto.Field( + proto.BOOL, + number=1, + ) class AwsNodeConfig(proto.Message): @@ -773,10 +939,11 @@ class AwsNodeConfig(proto.Message): Attributes: instance_type (str): - Optional. The AWS instance type. - - When unspecified, it uses a default based on the - node pool's version. + Optional. The EC2 instance type when creating + on-Demand instances. + If unspecified during node pool creation, a + default will be chosen based on the node pool + version, and assigned to this field. root_volume (google.cloud.gke_multicloud_v1.types.AwsVolumeTemplate): Optional. Template for the root volume provisioned for node pool nodes. Volumes will be @@ -804,8 +971,7 @@ class AwsNodeConfig(proto.Message): assigned to nodes in the pool. image_type (str): Optional. The OS image type to use on node pool instances. - Can have a value of ``ubuntu``, or ``windows`` if the - cluster enables the Windows node pool preview feature. + Can be unspecified, or have a value of ``ubuntu``. When unspecified, it defaults to ``ubuntu``. ssh_config (google.cloud.gke_multicloud_v1.types.AwsSshConfig): @@ -831,6 +997,12 @@ class AwsNodeConfig(proto.Message): When unspecified, metrics collection is disabled. + spot_config (google.cloud.gke_multicloud_v1.types.SpotConfig): + Optional. Configuration for provisioning EC2 Spot instances + + When specified, the node pool will provision Spot instances + from the set of spot_config.instance_types. This field is + mutually exclusive with ``instance_type``. """ instance_type: str = proto.Field( @@ -896,6 +1068,11 @@ class AwsNodeConfig(proto.Message): message="AwsAutoscalingGroupMetricsCollection", ) ) + spot_config: "SpotConfig" = proto.Field( + proto.MESSAGE, + number=16, + message="SpotConfig", + ) class AwsNodePoolAutoscaling(proto.Message): @@ -924,6 +1101,75 @@ class AwsNodePoolAutoscaling(proto.Message): ) +class AwsOpenIdConfig(proto.Message): + r"""AwsOpenIdConfig is an OIDC discovery document for the + cluster. 
See the OpenID Connect Discovery 1.0 specification for + details. + + Attributes: + issuer (str): + OIDC Issuer. + jwks_uri (str): + JSON Web Key uri. + response_types_supported (MutableSequence[str]): + Supported response types. + subject_types_supported (MutableSequence[str]): + Supported subject types. + id_token_signing_alg_values_supported (MutableSequence[str]): + supported ID Token signing Algorithms. + claims_supported (MutableSequence[str]): + Supported claims. + grant_types (MutableSequence[str]): + Supported grant types. + """ + + issuer: str = proto.Field( + proto.STRING, + number=1, + ) + jwks_uri: str = proto.Field( + proto.STRING, + number=2, + ) + response_types_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + subject_types_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + id_token_signing_alg_values_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + claims_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + grant_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + + +class AwsJsonWebKeys(proto.Message): + r"""AwsJsonWebKeys is a valid JSON Web Key Set as specififed in + RFC 7517. + + Attributes: + keys (MutableSequence[google.cloud.gke_multicloud_v1.types.Jwk]): + The public component of the keys used by the + cluster to sign token requests. + """ + + keys: MutableSequence[common_resources.Jwk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_resources.Jwk, + ) + + class AwsServerConfig(proto.Message): r"""AwsServerConfig is the configuration of GKE cluster on AWS. @@ -931,7 +1177,11 @@ class AwsServerConfig(proto.Message): name (str): The resource name of the config. valid_versions (MutableSequence[google.cloud.gke_multicloud_v1.types.AwsK8sVersionInfo]): - List of valid Kubernetes versions. 
+ List of all released Kubernetes versions, including ones + which are end of life and can no longer be used. Filter by + the ``enabled`` property to limit to currently available + versions. Valid versions supported for both create and + update operations supported_aws_regions (MutableSequence[str]): The list of supported AWS regions. """ @@ -957,12 +1207,51 @@ class AwsK8sVersionInfo(proto.Message): Attributes: version (str): Kubernetes version name. + enabled (bool): + Optional. True if the version is available + for cluster creation. If a version is enabled + for creation, it can be used to create new + clusters. Otherwise, cluster creation will fail. + However, cluster upgrade operations may succeed, + even if the version is not enabled. + end_of_life (bool): + Optional. True if this cluster version + belongs to a minor version that has reached its + end of life and is no longer in scope to receive + security and bug fixes. + end_of_life_date (google.type.date_pb2.Date): + Optional. The estimated date (in Pacific Time) when this + cluster version will reach its end of life. Or if this + version is no longer supported (the ``end_of_life`` field is + true), this is the actual date (in Pacific time) when the + version reached its end of life. + release_date (google.type.date_pb2.Date): + Optional. The date (in Pacific Time) when the + cluster version was released. """ version: str = proto.Field( proto.STRING, number=1, ) + enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + end_of_life: bool = proto.Field( + proto.BOOL, + number=4, + ) + end_of_life_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=5, + message=date_pb2.Date, + ) + release_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=6, + message=date_pb2.Date, + ) class AwsSshConfig(proto.Message): @@ -1087,6 +1376,21 @@ class AwsAutoscalingGroupMetricsCollection(proto.Message): ) +class SpotConfig(proto.Message): + r"""SpotConfig has configuration info for Spot node. 
+ + Attributes: + instance_types (MutableSequence[str]): + Required. A list of instance types for + creating spot node pool. + """ + + instance_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class AwsClusterError(proto.Message): r"""AwsClusterError describes errors found on AWS clusters. diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_service.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_service.py index d2cbcf417f8e..0961d66da21a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_service.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_service.py @@ -34,13 +34,18 @@ "DeleteAwsClusterRequest", "CreateAwsNodePoolRequest", "UpdateAwsNodePoolRequest", + "RollbackAwsNodePoolUpdateRequest", "GetAwsNodePoolRequest", "ListAwsNodePoolsRequest", "ListAwsNodePoolsResponse", "DeleteAwsNodePoolRequest", + "GetAwsOpenIdConfigRequest", + "GetAwsJsonWebKeysRequest", "GetAwsServerConfigRequest", "GenerateAwsAccessTokenRequest", "GenerateAwsAccessTokenResponse", + "GenerateAwsClusterAgentTokenRequest", + "GenerateAwsClusterAgentTokenResponse", }, ) @@ -120,6 +125,8 @@ class UpdateAwsClusterRequest(proto.Message): - ``annotations``. - ``control_plane.version``. - ``authorization.admin_users``. + - ``authorization.admin_groups``. + - ``binary_authorization.evaluation_mode``. - ``control_plane.aws_services_authentication.role_arn``. - ``control_plane.aws_services_authentication.role_session_name``. - ``control_plane.config_encryption.kms_key_arn``. @@ -131,6 +138,7 @@ class UpdateAwsClusterRequest(proto.Message): - ``control_plane.root_volume.size_gib``. - ``control_plane.root_volume.volume_type``. - ``control_plane.root_volume.iops``. + - ``control_plane.root_volume.throughput``. - ``control_plane.root_volume.kms_key_arn``. - ``control_plane.ssh_config``. 
- ``control_plane.ssh_config.ec2_key_pair``. @@ -139,6 +147,7 @@ class UpdateAwsClusterRequest(proto.Message): - ``logging_config.component_config.enable_components``. - ``control_plane.tags``. - ``monitoring_config.managed_prometheus_config.enabled``. + - ``networking.per_node_pool_sg_rules_disabled``. """ aws_cluster: aws_resources.AwsCluster = proto.Field( @@ -280,6 +289,12 @@ class DeleteAwsClusterRequest(proto.Message): [Operation][google.longrunning.Operation] will be returned. Useful for idempotent deletion. + ignore_errors (bool): + Optional. If set to true, the deletion of + [AwsCluster][google.cloud.gkemulticloud.v1.AwsCluster] + resource will succeed even if errors occur during deleting + in cluster resources. Using this parameter may result in + orphaned resources in the cluster. etag (str): The current etag of the [AwsCluster][google.cloud.gkemulticloud.v1.AwsCluster]. @@ -304,6 +319,10 @@ class DeleteAwsClusterRequest(proto.Message): proto.BOOL, number=3, ) + ignore_errors: bool = proto.Field( + proto.BOOL, + number=5, + ) etag: str = proto.Field( proto.STRING, number=4, @@ -388,6 +407,7 @@ class UpdateAwsNodePoolRequest(proto.Message): - ``config.config_encryption.kms_key_arn``. - ``config.security_group_ids``. - ``config.root_volume.iops``. + - ``config.root_volume.throughput``. - ``config.root_volume.kms_key_arn``. - ``config.root_volume.volume_type``. - ``config.root_volume.size_gib``. @@ -403,6 +423,13 @@ class UpdateAwsNodePoolRequest(proto.Message): - ``config.autoscaling_metrics_collection``. - ``config.autoscaling_metrics_collection.granularity``. - ``config.autoscaling_metrics_collection.metrics``. + - ``config.instance_type``. + - ``management.auto_repair``. + - ``management``. + - ``update_settings``. + - ``update_settings.surge_settings``. + - ``update_settings.surge_settings.max_surge``. + - ``update_settings.surge_settings.max_unavailable``. 
""" aws_node_pool: aws_resources.AwsNodePool = proto.Field( @@ -421,6 +448,38 @@ class UpdateAwsNodePoolRequest(proto.Message): ) +class RollbackAwsNodePoolUpdateRequest(proto.Message): + r"""Request message for ``AwsClusters.RollbackAwsNodePoolUpdate`` + method. + + Attributes: + name (str): + Required. The name of the + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] + resource to rollback. + + ``AwsNodePool`` names are formatted as + ``projects//locations//awsClusters//awsNodePools/``. + + See `Resource + Names `__ + for more details on Google Cloud resource names. + respect_pdb (bool): + Optional. Option for rollback to ignore the + PodDisruptionBudget when draining the node pool + nodes. Default value is false. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + respect_pdb: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class GetAwsNodePoolRequest(proto.Message): r"""Request message for ``AwsClusters.GetAwsNodePool`` method. @@ -544,6 +603,12 @@ class DeleteAwsNodePoolRequest(proto.Message): [Operation][google.longrunning.Operation] will be returned. Useful for idempotent deletion. + ignore_errors (bool): + Optional. If set to true, the deletion of + [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool] + resource will succeed even if errors occur during deleting + in node pool resources. Using this parameter may result in + orphaned resources in the node pool. etag (str): The current ETag of the [AwsNodePool][google.cloud.gkemulticloud.v1.AwsNodePool]. @@ -568,12 +633,55 @@ class DeleteAwsNodePoolRequest(proto.Message): proto.BOOL, number=3, ) + ignore_errors: bool = proto.Field( + proto.BOOL, + number=5, + ) etag: str = proto.Field( proto.STRING, number=4, ) +class GetAwsOpenIdConfigRequest(proto.Message): + r"""GetAwsOpenIdConfigRequest gets the OIDC discovery document + for the cluster. See the OpenID Connect Discovery 1.0 + specification for details. + + Attributes: + aws_cluster (str): + Required. 
The AwsCluster, which owns the OIDC + discovery document. Format: + + projects/{project}/locations/{location}/awsClusters/{cluster} + """ + + aws_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetAwsJsonWebKeysRequest(proto.Message): + r"""GetAwsJsonWebKeysRequest gets the public component of the keys used + by the cluster to sign token requests. This will be the jwks_uri for + the discover document returned by getOpenIDConfig. See the OpenID + Connect Discovery 1.0 specification for details. + + Attributes: + aws_cluster (str): + Required. The AwsCluster, which owns the + JsonWebKeys. Format: + + projects/{project}/locations/{location}/awsClusters/{cluster} + """ + + aws_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + + class GetAwsServerConfigRequest(proto.Message): r"""GetAwsServerConfigRequest gets the server config of GKE cluster on AWS. @@ -644,4 +752,98 @@ class GenerateAwsAccessTokenResponse(proto.Message): ) +class GenerateAwsClusterAgentTokenRequest(proto.Message): + r""" + + Attributes: + aws_cluster (str): + Required. + subject_token (str): + Required. + subject_token_type (str): + Required. + version (str): + Required. + node_pool_id (str): + Optional. + grant_type (str): + Optional. + audience (str): + Optional. + scope (str): + Optional. + requested_token_type (str): + Optional. + options (str): + Optional. 
+ """ + + aws_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + subject_token: str = proto.Field( + proto.STRING, + number=2, + ) + subject_token_type: str = proto.Field( + proto.STRING, + number=3, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=5, + ) + grant_type: str = proto.Field( + proto.STRING, + number=6, + ) + audience: str = proto.Field( + proto.STRING, + number=7, + ) + scope: str = proto.Field( + proto.STRING, + number=8, + ) + requested_token_type: str = proto.Field( + proto.STRING, + number=9, + ) + options: str = proto.Field( + proto.STRING, + number=10, + ) + + +class GenerateAwsClusterAgentTokenResponse(proto.Message): + r""" + + Attributes: + access_token (str): + + expires_in (int): + + token_type (str): + + """ + + access_token: str = proto.Field( + proto.STRING, + number=1, + ) + expires_in: int = proto.Field( + proto.INT32, + number=2, + ) + token_type: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py index 08a12c09cdb6..83cdcd2467a1 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_resources.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore import proto # type: ignore from google.cloud.gke_multicloud_v1.types import common_resources @@ -37,9 +38,13 @@ "AzureAuthorization", "AzureServicesAuthentication", "AzureClusterUser", + "AzureClusterGroup", "AzureNodePool", + "AzureNodeManagement", "AzureNodeConfig", "AzureNodePoolAutoscaling", + 
"AzureOpenIdConfig", + "AzureJsonWebKeys", "AzureServerConfig", "AzureK8sVersionInfo", "AzureSshConfig", @@ -85,6 +90,9 @@ class AzureCluster(proto.Message): that contains authentication configuration for how the Anthos Multi-Cloud API connects to Azure APIs. + Either azure_client or azure_services_authentication should + be provided. + The ``AzureClient`` resource must reside on the same Google Cloud Platform project and region as the ``AzureCluster``. @@ -104,8 +112,11 @@ class AzureCluster(proto.Message): Required. Configuration related to the cluster RBAC settings. azure_services_authentication (google.cloud.gke_multicloud_v1.types.AzureServicesAuthentication): - Optional. Authentication configuration for - management of Azure resources. + Optional. Authentication configuration for management of + Azure resources. + + Either azure_client or azure_services_authentication should + be provided. state (google.cloud.gke_multicloud_v1.types.AzureCluster.State): Output only. The current state of the cluster. @@ -741,11 +752,19 @@ class AzureAuthorization(proto.Message): Attributes: admin_users (MutableSequence[google.cloud.gke_multicloud_v1.types.AzureClusterUser]): - Required. Users that can perform operations as a cluster + Optional. Users that can perform operations as a cluster admin. A managed ClusterRoleBinding will be created to grant the ``cluster-admin`` ClusterRole to the users. Up to ten admin users can be provided. + For more info on RBAC, see + https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles + admin_groups (MutableSequence[google.cloud.gke_multicloud_v1.types.AzureClusterGroup]): + Optional. Groups of users that can perform operations as a + cluster admin. A managed ClusterRoleBinding will be created + to grant the ``cluster-admin`` ClusterRole to the groups. Up + to ten admin groups can be provided. 
+ For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles """ @@ -755,6 +774,11 @@ class AzureAuthorization(proto.Message): number=1, message="AzureClusterUser", ) + admin_groups: MutableSequence["AzureClusterGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AzureClusterGroup", + ) class AzureServicesAuthentication(proto.Message): @@ -795,6 +819,21 @@ class AzureClusterUser(proto.Message): ) +class AzureClusterGroup(proto.Message): + r"""Identities of a group-type subject for Azure clusters. + + Attributes: + group (str): + Required. The name of the group, e.g. + ``my-group@domain.com``. + """ + + group: str = proto.Field( + proto.STRING, + number=1, + ) + + class AzureNodePool(proto.Message): r"""An Anthos node pool running on Azure. @@ -867,6 +906,9 @@ class AzureNodePool(proto.Message): errors (MutableSequence[google.cloud.gke_multicloud_v1.types.AzureNodePoolError]): Output only. A set of errors found in the node pool. + management (google.cloud.gke_multicloud_v1.types.AzureNodeManagement): + Optional. The Management configuration for + this node pool. """ class State(proto.Enum): @@ -971,6 +1013,31 @@ class State(proto.Enum): number=29, message="AzureNodePoolError", ) + management: "AzureNodeManagement" = proto.Field( + proto.MESSAGE, + number=30, + message="AzureNodeManagement", + ) + + +class AzureNodeManagement(proto.Message): + r"""AzureNodeManagement defines the set of node management + features turned on for an Azure node pool. + + Attributes: + auto_repair (bool): + Optional. Whether or not the nodes will be + automatically repaired. When set to true, the + nodes in this node pool will be monitored and if + they fail health checks consistently over a + period of time, an automatic repair action will + be triggered to replace them with new nodes. 
+ """ + + auto_repair: bool = proto.Field( + proto.BOOL, + number=1, + ) class AzureNodeConfig(proto.Message): @@ -1003,8 +1070,7 @@ class AzureNodeConfig(proto.Message): characters. Values can be up to 255 Unicode characters. image_type (str): Optional. The OS image type to use on node pool instances. - Can have a value of ``ubuntu``, or ``windows`` if the - cluster enables the Windows node pool preview feature. + Can be unspecified, or have a value of ``ubuntu``. When unspecified, it defaults to ``ubuntu``. ssh_config (google.cloud.gke_multicloud_v1.types.AzureSshConfig): @@ -1098,6 +1164,75 @@ class AzureNodePoolAutoscaling(proto.Message): ) +class AzureOpenIdConfig(proto.Message): + r"""AzureOpenIdConfig is an OIDC discovery document for the + cluster. See the OpenID Connect Discovery 1.0 specification for + details. + + Attributes: + issuer (str): + OIDC Issuer. + jwks_uri (str): + JSON Web Key uri. + response_types_supported (MutableSequence[str]): + Supported response types. + subject_types_supported (MutableSequence[str]): + Supported subject types. + id_token_signing_alg_values_supported (MutableSequence[str]): + supported ID Token signing Algorithms. + claims_supported (MutableSequence[str]): + Supported claims. + grant_types (MutableSequence[str]): + Supported grant types. 
+ """ + + issuer: str = proto.Field( + proto.STRING, + number=1, + ) + jwks_uri: str = proto.Field( + proto.STRING, + number=2, + ) + response_types_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + subject_types_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + id_token_signing_alg_values_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + claims_supported: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + grant_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + + +class AzureJsonWebKeys(proto.Message): + r"""AzureJsonWebKeys is a valid JSON Web Key Set as specififed in + RFC 7517. + + Attributes: + keys (MutableSequence[google.cloud.gke_multicloud_v1.types.Jwk]): + The public component of the keys used by the + cluster to sign token requests. + """ + + keys: MutableSequence[common_resources.Jwk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=common_resources.Jwk, + ) + + class AzureServerConfig(proto.Message): r"""AzureServerConfig contains information about a Google Cloud location, such as supported Azure regions and Kubernetes @@ -1114,7 +1249,11 @@ class AzureServerConfig(proto.Message): Names `__ for more details on Google Cloud Platform resource names. valid_versions (MutableSequence[google.cloud.gke_multicloud_v1.types.AzureK8sVersionInfo]): - List of valid Kubernetes versions. + List of all released Kubernetes versions, including ones + which are end of life and can no longer be used. Filter by + the ``enabled`` property to limit to currently available + versions. Valid versions supported for both create and + update operations supported_azure_regions (MutableSequence[str]): The list of supported Azure regions. 
""" @@ -1135,18 +1274,56 @@ class AzureServerConfig(proto.Message): class AzureK8sVersionInfo(proto.Message): - r"""Information about a supported Kubernetes version. + r"""Kubernetes version information of GKE cluster on Azure. Attributes: version (str): - A supported Kubernetes version (for example, - ``1.19.10-gke.1000``) + Kubernetes version name (for example, ``1.19.10-gke.1000``) + enabled (bool): + Optional. True if the version is available + for cluster creation. If a version is enabled + for creation, it can be used to create new + clusters. Otherwise, cluster creation will fail. + However, cluster upgrade operations may succeed, + even if the version is not enabled. + end_of_life (bool): + Optional. True if this cluster version + belongs to a minor version that has reached its + end of life and is no longer in scope to receive + security and bug fixes. + end_of_life_date (google.type.date_pb2.Date): + Optional. The estimated date (in Pacific Time) when this + cluster version will reach its end of life. Or if this + version is no longer supported (the ``end_of_life`` field is + true), this is the actual date (in Pacific time) when the + version reached its end of life. + release_date (google.type.date_pb2.Date): + Optional. The date (in Pacific Time) when the + cluster version was released. 
""" version: str = proto.Field( proto.STRING, number=1, ) + enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + end_of_life: bool = proto.Field( + proto.BOOL, + number=4, + ) + end_of_life_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=5, + message=date_pb2.Date, + ) + release_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=6, + message=date_pb2.Date, + ) class AzureSshConfig(proto.Message): diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_service.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_service.py index 9d9a84b94465..5174c517410b 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_service.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/azure_service.py @@ -38,6 +38,8 @@ "ListAzureNodePoolsRequest", "ListAzureNodePoolsResponse", "DeleteAzureNodePoolRequest", + "GetAzureOpenIdConfigRequest", + "GetAzureJsonWebKeysRequest", "GetAzureServerConfigRequest", "CreateAzureClientRequest", "GetAzureClientRequest", @@ -46,6 +48,8 @@ "DeleteAzureClientRequest", "GenerateAzureAccessTokenRequest", "GenerateAzureAccessTokenResponse", + "GenerateAzureClusterAgentTokenRequest", + "GenerateAzureClusterAgentTokenResponse", }, ) @@ -127,6 +131,7 @@ class UpdateAzureClusterRequest(proto.Message): - ``control_plane.vm_size``. - ``annotations``. - ``authorization.admin_users``. + - ``authorization.admin_groups``. - ``control_plane.root_volume.size_gib``. - ``azure_services_authentication``. - ``azure_services_authentication.tenant_id``. @@ -253,7 +258,7 @@ def raw_page(self): class DeleteAzureClusterRequest(proto.Message): - r"""Request message for ``Clusters.DeleteAzureCluster`` method. + r"""Request message for ``AzureClusters.DeleteAzureCluster`` method. 
Attributes: name (str): @@ -317,8 +322,8 @@ class CreateAzureNodePoolRequest(proto.Message): [AzureCluster][google.cloud.gkemulticloud.v1.AzureCluster] resource where this node pool will be created. - Location names are formatted as - ``projects//locations/``. + ``AzureCluster`` names are formatted as + ``projects//locations//azureClusters/``. See `Resource Names `__ @@ -385,6 +390,8 @@ class UpdateAzureNodePoolRequest(proto.Message): - ``autoscaling.min_node_count``. - ``autoscaling.max_node_count``. - ``config.ssh_config.authorized_key``. + - ``management.auto_repair``. + - ``management``. """ azure_node_pool: azure_resources.AzureNodePool = proto.Field( @@ -503,7 +510,7 @@ def raw_page(self): class DeleteAzureNodePoolRequest(proto.Message): - r"""Delete message for ``AzureClusters.DeleteAzureNodePool`` method. + r"""Request message for ``AzureClusters.DeleteAzureNodePool`` method. Attributes: name (str): @@ -558,6 +565,45 @@ class DeleteAzureNodePoolRequest(proto.Message): ) +class GetAzureOpenIdConfigRequest(proto.Message): + r"""GetAzureOpenIdConfigRequest gets the OIDC discovery document + for the cluster. See the OpenID Connect Discovery 1.0 + specification for details. + + Attributes: + azure_cluster (str): + Required. The AzureCluster, which owns the + OIDC discovery document. Format: + + projects//locations//azureClusters/ + """ + + azure_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetAzureJsonWebKeysRequest(proto.Message): + r"""GetAzureJsonWebKeysRequest gets the public component of the keys + used by the cluster to sign token requests. This will be the + jwks_uri for the discover document returned by getOpenIDConfig. See + the OpenID Connect Discovery 1.0 specification for details. + + Attributes: + azure_cluster (str): + Required. The AzureCluster, which owns the + JsonWebKeys. 
Format: + + projects//locations//azureClusters/ + """ + + azure_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + + class GetAzureServerConfigRequest(proto.Message): r"""GetAzureServerConfigRequest gets the server config of GKE cluster on Azure. @@ -788,7 +834,7 @@ class GenerateAzureAccessTokenRequest(proto.Message): resource to authenticate to. ``AzureCluster`` names are formatted as - ``projects//locations//AzureClusters/``. + ``projects//locations//azureClusters/``. See `Resource Names `__ @@ -825,4 +871,98 @@ class GenerateAzureAccessTokenResponse(proto.Message): ) +class GenerateAzureClusterAgentTokenRequest(proto.Message): + r""" + + Attributes: + azure_cluster (str): + Required. + subject_token (str): + Required. + subject_token_type (str): + Required. + version (str): + Required. + node_pool_id (str): + Optional. + grant_type (str): + Optional. + audience (str): + Optional. + scope (str): + Optional. + requested_token_type (str): + Optional. + options (str): + Optional. 
+ """ + + azure_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + subject_token: str = proto.Field( + proto.STRING, + number=2, + ) + subject_token_type: str = proto.Field( + proto.STRING, + number=3, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=5, + ) + grant_type: str = proto.Field( + proto.STRING, + number=6, + ) + audience: str = proto.Field( + proto.STRING, + number=7, + ) + scope: str = proto.Field( + proto.STRING, + number=8, + ) + requested_token_type: str = proto.Field( + proto.STRING, + number=9, + ) + options: str = proto.Field( + proto.STRING, + number=10, + ) + + +class GenerateAzureClusterAgentTokenResponse(proto.Message): + r""" + + Attributes: + access_token (str): + + expires_in (int): + + token_type (str): + + """ + + access_token: str = proto.Field( + proto.STRING, + number=1, + ) + expires_in: int = proto.Field( + proto.INT32, + number=2, + ) + token_type: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py index 4841f99fe8cf..78f2f80ae880 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py @@ -23,6 +23,7 @@ __protobuf__ = proto.module( package="google.cloud.gkemulticloud.v1", manifest={ + "Jwk", "WorkloadIdentityConfig", "MaxPodsConstraint", "OperationMetadata", @@ -32,10 +33,73 @@ "LoggingComponentConfig", "MonitoringConfig", "ManagedPrometheusConfig", + "BinaryAuthorization", }, ) +class Jwk(proto.Message): + r"""Jwk is a JSON Web Key as specified in RFC 7517. + + Attributes: + kty (str): + Key Type. + alg (str): + Algorithm. 
+ use (str): + Permitted uses for the public keys. + kid (str): + Key ID. + n (str): + Used for RSA keys. + e (str): + Used for RSA keys. + x (str): + Used for ECDSA keys. + y (str): + Used for ECDSA keys. + crv (str): + Used for ECDSA keys. + """ + + kty: str = proto.Field( + proto.STRING, + number=1, + ) + alg: str = proto.Field( + proto.STRING, + number=2, + ) + use: str = proto.Field( + proto.STRING, + number=3, + ) + kid: str = proto.Field( + proto.STRING, + number=4, + ) + n: str = proto.Field( + proto.STRING, + number=5, + ) + e: str = proto.Field( + proto.STRING, + number=6, + ) + x: str = proto.Field( + proto.STRING, + number=7, + ) + y: str = proto.Field( + proto.STRING, + number=8, + ) + crv: str = proto.Field( + proto.STRING, + number=9, + ) + + class WorkloadIdentityConfig(proto.Message): r"""Workload Identity settings. @@ -316,4 +380,38 @@ class ManagedPrometheusConfig(proto.Message): ) +class BinaryAuthorization(proto.Message): + r"""Configuration for Binary Authorization. + + Attributes: + evaluation_mode (google.cloud.gke_multicloud_v1.types.BinaryAuthorization.EvaluationMode): + Mode of operation for binauthz policy + evaluation. If unspecified, defaults to + DISABLED. + """ + + class EvaluationMode(proto.Enum): + r"""Binary Authorization mode of operation. + + Values: + EVALUATION_MODE_UNSPECIFIED (0): + Default value + DISABLED (1): + Disable BinaryAuthorization + PROJECT_SINGLETON_POLICY_ENFORCE (2): + Enforce Kubernetes admission requests with + BinaryAuthorization using the project's + singleton policy. 
+ """ + EVALUATION_MODE_UNSPECIFIED = 0 + DISABLED = 1 + PROJECT_SINGLETON_POLICY_ENFORCE = 2 + + evaluation_mode: EvaluationMode = proto.Field( + proto.ENUM, + number=1, + enum=EvaluationMode, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_async.py new file mode 100644 index 000000000000..a34835707979 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAttachedClusterAgentToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AttachedClusters_GenerateAttachedClusterAgentToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_generate_attached_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AttachedClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAttachedClusterAgentTokenRequest( + attached_cluster="attached_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = await client.generate_attached_cluster_agent_token(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AttachedClusters_GenerateAttachedClusterAgentToken_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_sync.py new file mode 100644 index 000000000000..7475828d9022 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAttachedClusterAgentToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AttachedClusters_GenerateAttachedClusterAgentToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_generate_attached_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AttachedClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAttachedClusterAgentTokenRequest( + attached_cluster="attached_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = client.generate_attached_cluster_agent_token(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AttachedClusters_GenerateAttachedClusterAgentToken_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_async.py index a2683951f755..fb5535c92370 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_async.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_async.py @@ -50,7 +50,6 @@ async def sample_create_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAwsClusterRequest( diff --git 
a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_sync.py index 9b31e806df8d..f1b258d0ee70 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_sync.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_create_aws_cluster_sync.py @@ -50,7 +50,6 @@ def sample_create_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAwsClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_async.py new file mode 100644 index 000000000000..ddcacf66d9d0 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAwsClusterAgentToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_GenerateAwsClusterAgentToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_generate_aws_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAwsClusterAgentTokenRequest( + aws_cluster="aws_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = await client.generate_aws_cluster_agent_token(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_GenerateAwsClusterAgentToken_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_sync.py new file mode 100644 index 000000000000..dcf5812b25a4 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAwsClusterAgentToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_GenerateAwsClusterAgentToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_generate_aws_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAwsClusterAgentTokenRequest( + aws_cluster="aws_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = client.generate_aws_cluster_agent_token(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_GenerateAwsClusterAgentToken_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_async.py new file mode 100644 index 000000000000..f817d128855f --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_async.py @@ -0,0 +1,52 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAwsJsonWebKeys +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_GetAwsJsonWebKeys_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_get_aws_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsJsonWebKeysRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = await client.get_aws_json_web_keys(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_GetAwsJsonWebKeys_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_sync.py new file mode 100644 index 000000000000..2d317c6b7627 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAwsJsonWebKeys +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_GetAwsJsonWebKeys_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_get_aws_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsJsonWebKeysRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = client.get_aws_json_web_keys(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_GetAwsJsonWebKeys_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_async.py new file mode 100644 index 000000000000..04b3f6641826 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAwsOpenIdConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_GetAwsOpenIdConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_get_aws_open_id_config(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsOpenIdConfigRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = await client.get_aws_open_id_config(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_GetAwsOpenIdConfig_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_sync.py new file mode 100644 index 000000000000..3fe8692744c5 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAwsOpenIdConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_GetAwsOpenIdConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_get_aws_open_id_config(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAwsOpenIdConfigRequest( + aws_cluster="aws_cluster_value", + ) + + # Make the request + response = client.get_aws_open_id_config(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_GetAwsOpenIdConfig_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_async.py new file mode 100644 index 000000000000..3b4f1ee42dbb --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackAwsNodePoolUpdate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_RollbackAwsNodePoolUpdate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_rollback_aws_node_pool_update(): + # Create a client + client = gke_multicloud_v1.AwsClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.RollbackAwsNodePoolUpdateRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_aws_node_pool_update(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_RollbackAwsNodePoolUpdate_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_sync.py new file mode 100644 index 000000000000..37bcc3c8a403 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RollbackAwsNodePoolUpdate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AwsClusters_RollbackAwsNodePoolUpdate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_rollback_aws_node_pool_update(): + # Create a client + client = gke_multicloud_v1.AwsClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.RollbackAwsNodePoolUpdateRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_aws_node_pool_update(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AwsClusters_RollbackAwsNodePoolUpdate_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py index 16f398b56a87..134a56449418 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py @@ -50,7 +50,6 @@ 
async def sample_update_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAwsClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py index 3be6027a04da..12142e6e2c6d 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py @@ -50,7 +50,6 @@ def sample_update_aws_cluster(): aws_cluster.control_plane.database_encryption.kms_key_arn = "kms_key_arn_value" aws_cluster.control_plane.aws_services_authentication.role_arn = "role_arn_value" aws_cluster.control_plane.config_encryption.kms_key_arn = "kms_key_arn_value" - aws_cluster.authorization.admin_users.username = "username_value" aws_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAwsClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py index 9fd52ad2def1..17e506016eea 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py +++ 
b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py @@ -47,7 +47,6 @@ async def sample_create_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAzureClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py index e859d5c26e3b..8b9793324463 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py @@ -47,7 +47,6 @@ def sample_create_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.CreateAzureClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_async.py 
b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_async.py new file mode 100644 index 000000000000..60391170e4e7 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAzureClusterAgentToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AzureClusters_GenerateAzureClusterAgentToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_generate_azure_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AzureClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAzureClusterAgentTokenRequest( + azure_cluster="azure_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = await client.generate_azure_cluster_agent_token(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AzureClusters_GenerateAzureClusterAgentToken_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_sync.py new file mode 100644 index 000000000000..86c039cd0a1d --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAzureClusterAgentToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AzureClusters_GenerateAzureClusterAgentToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_generate_azure_cluster_agent_token(): + # Create a client + client = gke_multicloud_v1.AzureClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GenerateAzureClusterAgentTokenRequest( + azure_cluster="azure_cluster_value", + subject_token="subject_token_value", + subject_token_type="subject_token_type_value", + version="version_value", + ) + + # Make the request + response = client.generate_azure_cluster_agent_token(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AzureClusters_GenerateAzureClusterAgentToken_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_async.py new file mode 100644 index 000000000000..5ad2f8cf3bb0 --- /dev/null +++ 
b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAzureJsonWebKeys +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AzureClusters_GetAzureJsonWebKeys_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_get_azure_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AzureClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureJsonWebKeysRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = await client.get_azure_json_web_keys(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AzureClusters_GetAzureJsonWebKeys_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_sync.py new file mode 100644 index 000000000000..083ba2ffbac7 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAzureJsonWebKeys +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AzureClusters_GetAzureJsonWebKeys_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_get_azure_json_web_keys(): + # Create a client + client = gke_multicloud_v1.AzureClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureJsonWebKeysRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = client.get_azure_json_web_keys(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AzureClusters_GetAzureJsonWebKeys_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_async.py new file mode 100644 index 000000000000..7381f3066e19 --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAzureOpenIdConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AzureClusters_GetAzureOpenIdConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +async def sample_get_azure_open_id_config(): + # Create a client + client = gke_multicloud_v1.AzureClustersAsyncClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureOpenIdConfigRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = await client.get_azure_open_id_config(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AzureClusters_GetAzureOpenIdConfig_async] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_sync.py new file mode 100644 index 000000000000..8616d4dff02f --- /dev/null +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAzureOpenIdConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-gke-multicloud + + +# [START gkemulticloud_v1_generated_AzureClusters_GetAzureOpenIdConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import gke_multicloud_v1 + + +def sample_get_azure_open_id_config(): + # Create a client + client = gke_multicloud_v1.AzureClustersClient() + + # Initialize request argument(s) + request = gke_multicloud_v1.GetAzureOpenIdConfigRequest( + azure_cluster="azure_cluster_value", + ) + + # Make the request + response = client.get_azure_open_id_config(request=request) + + # Handle the response + print(response) + +# [END gkemulticloud_v1_generated_AzureClusters_GetAzureOpenIdConfig_sync] diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_async.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_async.py index ee7b3ad1546b..efe1bcd07e8f 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_async.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_async.py @@ -47,7 +47,6 @@ async def sample_update_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] 
azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAzureClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_sync.py b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_sync.py index 7b292a28e169..4246ba88991c 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_sync.py +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/gkemulticloud_v1_generated_azure_clusters_update_azure_cluster_sync.py @@ -47,7 +47,6 @@ def sample_update_azure_cluster(): azure_cluster.networking.service_address_cidr_blocks = ['service_address_cidr_blocks_value1', 'service_address_cidr_blocks_value2'] azure_cluster.control_plane.version = "version_value" azure_cluster.control_plane.ssh_config.authorized_key = "authorized_key_value" - azure_cluster.authorization.admin_users.username = "username_value" azure_cluster.fleet.project = "project_value" request = gke_multicloud_v1.UpdateAzureClusterRequest( diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json index 328d9ef99290..baaa77b57773 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": 
"google-cloud-gke-multicloud", - "version": "0.6.4" + "version": "0.6.5" }, "snippets": [ { @@ -349,6 +349,159 @@ ], "title": "gkemulticloud_v1_generated_attached_clusters_delete_attached_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AttachedClustersAsyncClient", + "shortName": "AttachedClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AttachedClustersAsyncClient.generate_attached_cluster_agent_token", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AttachedClusters.GenerateAttachedClusterAgentToken", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AttachedClusters", + "shortName": "AttachedClusters" + }, + "shortName": "GenerateAttachedClusterAgentToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenResponse", + "shortName": "generate_attached_cluster_agent_token" + }, + "description": "Sample for GenerateAttachedClusterAgentToken", + "file": "gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AttachedClusters_GenerateAttachedClusterAgentToken_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AttachedClustersClient", + "shortName": "AttachedClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AttachedClustersClient.generate_attached_cluster_agent_token", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AttachedClusters.GenerateAttachedClusterAgentToken", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AttachedClusters", + "shortName": "AttachedClusters" + }, + "shortName": "GenerateAttachedClusterAgentToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAttachedClusterAgentTokenResponse", + "shortName": "generate_attached_cluster_agent_token" + }, + "description": "Sample for GenerateAttachedClusterAgentToken", + "file": "gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AttachedClusters_GenerateAttachedClusterAgentToken_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"gkemulticloud_v1_generated_attached_clusters_generate_attached_cluster_agent_token_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1396,12 +1549,12 @@ "regionTag": "gkemulticloud_v1_generated_AwsClusters_CreateAwsCluster_async", "segments": [ { - "end": 71, + "end": 70, "start": 27, "type": "FULL" }, { - "end": 71, + "end": 70, "start": 27, "type": "SHORT" }, @@ -1411,18 +1564,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 61, + "end": 60, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 68, - "start": 62, + "end": 67, + "start": 61, "type": "REQUEST_EXECUTION" }, { - "end": 72, - "start": 69, + "end": 71, + "start": 68, "type": "RESPONSE_HANDLING" } ], @@ -1484,12 +1637,12 @@ "regionTag": "gkemulticloud_v1_generated_AwsClusters_CreateAwsCluster_sync", "segments": [ { - "end": 71, + "end": 70, "start": 27, "type": "FULL" }, { - "end": 71, + "end": 70, "start": 27, "type": "SHORT" }, @@ -1499,18 +1652,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 61, + "end": 60, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 68, - "start": 62, + "end": 67, + "start": 61, "type": "REQUEST_EXECUTION" }, { - "end": 72, - "start": 69, + "end": 71, + "start": 68, "type": "RESPONSE_HANDLING" } ], @@ -2176,23 +2329,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.generate_aws_cluster_agent_token", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsCluster", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GenerateAwsClusterAgentToken", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "GetAwsCluster" + "shortName": "GenerateAwsClusterAgentToken" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.gke_multicloud_v1.types.GetAwsClusterRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenRequest" }, { "name": "retry", @@ -2207,22 +2356,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AwsCluster", - "shortName": "get_aws_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenResponse", + "shortName": "generate_aws_cluster_agent_token" }, - "description": "Sample for GetAwsCluster", - "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_async.py", + "description": "Sample for GenerateAwsClusterAgentToken", + "file": "gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsCluster_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GenerateAwsClusterAgentToken_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2232,22 +2381,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_async.py" }, { "canonical": true, @@ -2256,23 +2405,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.generate_aws_cluster_agent_token", "method": { - "fullName": 
"google.cloud.gkemulticloud.v1.AwsClusters.GetAwsCluster", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GenerateAwsClusterAgentToken", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "GetAwsCluster" + "shortName": "GenerateAwsClusterAgentToken" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAwsClusterRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenRequest" }, { "name": "retry", @@ -2287,22 +2432,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AwsCluster", - "shortName": "get_aws_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenResponse", + "shortName": "generate_aws_cluster_agent_token" }, - "description": "Sample for GetAwsCluster", - "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_sync.py", + "description": "Sample for GenerateAwsClusterAgentToken", + "file": "gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsCluster_sync", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GenerateAwsClusterAgentToken_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2312,22 +2457,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_sync.py" + "title": 
"gkemulticloud_v1_generated_aws_clusters_generate_aws_cluster_agent_token_sync.py" }, { "canonical": true, @@ -2337,19 +2482,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_cluster", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsCluster", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "GetAwsNodePool" + "shortName": "GetAwsCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAwsNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsClusterRequest" }, { "name": "name", @@ -2368,14 +2513,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AwsNodePool", - "shortName": "get_aws_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsCluster", + "shortName": "get_aws_cluster" }, - "description": "Sample for GetAwsNodePool", - "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_async.py", + "description": "Sample for GetAwsCluster", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsNodePool_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsCluster_async", "segments": [ { "end": 51, @@ -2408,7 +2553,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_async.py" }, { "canonical": true, @@ -2417,19 +2562,19 @@ "fullName": 
"google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_cluster", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsCluster", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "GetAwsNodePool" + "shortName": "GetAwsCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAwsNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsClusterRequest" }, { "name": "name", @@ -2448,14 +2593,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AwsNodePool", - "shortName": "get_aws_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsCluster", + "shortName": "get_aws_cluster" }, - "description": "Sample for GetAwsNodePool", - "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_sync.py", + "description": "Sample for GetAwsCluster", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsNodePool_sync", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsCluster_sync", "segments": [ { "end": 51, @@ -2488,7 +2633,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_sync.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_cluster_sync.py" }, { "canonical": true, @@ -2498,23 +2643,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_server_config", + 
"fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_json_web_keys", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsServerConfig", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsJsonWebKeys", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "GetAwsServerConfig" + "shortName": "GetAwsJsonWebKeys" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAwsServerConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsJsonWebKeysRequest" }, { "name": "retry", @@ -2529,14 +2670,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AwsServerConfig", - "shortName": "get_aws_server_config" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsJsonWebKeys", + "shortName": "get_aws_json_web_keys" }, - "description": "Sample for GetAwsServerConfig", - "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_async.py", + "description": "Sample for GetAwsJsonWebKeys", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsServerConfig_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsJsonWebKeys_async", "segments": [ { "end": 51, @@ -2569,7 +2710,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_async.py" }, { "canonical": true, @@ -2578,23 +2719,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_server_config", + "fullName": 
"google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_json_web_keys", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsServerConfig", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsJsonWebKeys", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "GetAwsServerConfig" + "shortName": "GetAwsJsonWebKeys" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAwsServerConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsJsonWebKeysRequest" }, { "name": "retry", @@ -2609,14 +2746,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AwsServerConfig", - "shortName": "get_aws_server_config" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsJsonWebKeys", + "shortName": "get_aws_json_web_keys" }, - "description": "Sample for GetAwsServerConfig", - "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_sync.py", + "description": "Sample for GetAwsJsonWebKeys", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsServerConfig_sync", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsJsonWebKeys_sync", "segments": [ { "end": 51, @@ -2649,7 +2786,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_sync.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_json_web_keys_sync.py" }, { "canonical": true, @@ -2659,22 +2796,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.list_aws_clusters", + "fullName": 
"google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsClusters", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "ListAwsClusters" + "shortName": "GetAwsNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.ListAwsClustersRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsNodePoolRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2690,22 +2827,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsClustersAsyncPager", - "shortName": "list_aws_clusters" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsNodePool", + "shortName": "get_aws_node_pool" }, - "description": "Sample for ListAwsClusters", - "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_async.py", + "description": "Sample for GetAwsNodePool", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsClusters_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsNodePool_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2725,12 +2862,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_async.py" }, { "canonical": true, @@ -2739,22 +2876,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": 
"google.cloud.gke_multicloud_v1.AwsClustersClient.list_aws_clusters", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsClusters", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "ListAwsClusters" + "shortName": "GetAwsNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.ListAwsClustersRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsNodePoolRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2770,22 +2907,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsClustersPager", - "shortName": "list_aws_clusters" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsNodePool", + "shortName": "get_aws_node_pool" }, - "description": "Sample for ListAwsClusters", - "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_sync.py", + "description": "Sample for GetAwsNodePool", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsClusters_sync", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsNodePool_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2805,12 +2942,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_sync.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_node_pool_sync.py" }, { "canonical": true, @@ -2820,23 +2957,19 @@ "fullName": 
"google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.list_aws_node_pools", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_open_id_config", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsNodePools", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsOpenIdConfig", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "ListAwsNodePools" + "shortName": "GetAwsOpenIdConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.ListAwsNodePoolsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsOpenIdConfigRequest" }, { "name": "retry", @@ -2851,22 +2984,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsNodePoolsAsyncPager", - "shortName": "list_aws_node_pools" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsOpenIdConfig", + "shortName": "get_aws_open_id_config" }, - "description": "Sample for ListAwsNodePools", - "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_async.py", + "description": "Sample for GetAwsOpenIdConfig", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsNodePools_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsOpenIdConfig_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2886,12 +3019,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_async.py" }, { "canonical": true, @@ -2900,23 +3033,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.list_aws_node_pools", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_open_id_config", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsNodePools", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsOpenIdConfig", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "ListAwsNodePools" + "shortName": "GetAwsOpenIdConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.ListAwsNodePoolsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsOpenIdConfigRequest" }, { "name": "retry", @@ -2931,22 +3060,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsNodePoolsPager", - "shortName": "list_aws_node_pools" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsOpenIdConfig", + "shortName": "get_aws_open_id_config" }, - "description": "Sample for ListAwsNodePools", - "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_sync.py", + "description": "Sample for GetAwsOpenIdConfig", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsNodePools_sync", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsOpenIdConfig_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ 
-2966,12 +3095,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_sync.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_open_id_config_sync.py" }, { "canonical": true, @@ -2981,27 +3110,23 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.update_aws_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.get_aws_server_config", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsCluster", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsServerConfig", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "UpdateAwsCluster" + "shortName": "GetAwsServerConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsClusterRequest" - }, - { - "name": "aws_cluster", - "type": "google.cloud.gke_multicloud_v1.types.AwsCluster" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsServerConfigRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -3016,22 +3141,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_aws_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.AwsServerConfig", + "shortName": "get_aws_server_config" }, - "description": "Sample for UpdateAwsCluster", - "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py", + "description": "Sample for GetAwsServerConfig", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"gkemulticloud_v1_generated_AwsClusters_UpdateAwsCluster_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsServerConfig_async", "segments": [ { - "end": 69, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 69, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3041,22 +3166,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 59, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 66, - "start": 60, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 70, - "start": 67, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_async.py" }, { "canonical": true, @@ -3065,27 +3190,23 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.update_aws_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.get_aws_server_config", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsCluster", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.GetAwsServerConfig", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "UpdateAwsCluster" + "shortName": "GetAwsServerConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsClusterRequest" - }, - { - "name": "aws_cluster", - "type": "google.cloud.gke_multicloud_v1.types.AwsCluster" + "type": "google.cloud.gke_multicloud_v1.types.GetAwsServerConfigRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -3100,22 +3221,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_aws_cluster" + 
"resultType": "google.cloud.gke_multicloud_v1.types.AwsServerConfig", + "shortName": "get_aws_server_config" }, - "description": "Sample for UpdateAwsCluster", - "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py", + "description": "Sample for GetAwsServerConfig", + "file": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsCluster_sync", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_GetAwsServerConfig_sync", "segments": [ { - "end": 69, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 69, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3125,22 +3246,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 59, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 66, - "start": 60, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 70, - "start": 67, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py" + "title": "gkemulticloud_v1_generated_aws_clusters_get_aws_server_config_sync.py" }, { "canonical": true, @@ -3150,27 +3271,23 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", "shortName": "AwsClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.update_aws_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.list_aws_clusters", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsClusters", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "UpdateAwsNodePool" + "shortName": "ListAwsClusters" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsNodePoolRequest" - }, - { - "name": 
"aws_node_pool", - "type": "google.cloud.gke_multicloud_v1.types.AwsNodePool" + "type": "google.cloud.gke_multicloud_v1.types.ListAwsClustersRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" }, { "name": "retry", @@ -3185,22 +3302,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_aws_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsClustersAsyncPager", + "shortName": "list_aws_clusters" }, - "description": "Sample for UpdateAwsNodePool", - "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_async.py", + "description": "Sample for ListAwsClusters", + "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsNodePool_async", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsClusters_async", "segments": [ { - "end": 64, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 64, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3210,22 +3327,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_async.py" + "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_async.py" }, { "canonical": true, @@ -3234,26 +3351,682 @@ "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", "shortName": "AwsClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.update_aws_node_pool", + "fullName": 
"google.cloud.gke_multicloud_v1.AwsClustersClient.list_aws_clusters", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsClusters", "service": { "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", "shortName": "AwsClusters" }, - "shortName": "UpdateAwsNodePool" + "shortName": "ListAwsClusters" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.ListAwsClustersRequest" }, { - "name": "aws_node_pool", - "type": "google.cloud.gke_multicloud_v1.types.AwsNodePool" + "name": "parent", + "type": "str" }, { - "name": "update_mask", + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsClustersPager", + "shortName": "list_aws_clusters" + }, + "description": "Sample for ListAwsClusters", + "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", + "shortName": 
"AwsClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.list_aws_node_pools", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsNodePools", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "ListAwsNodePools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.ListAwsNodePoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsNodePoolsAsyncPager", + "shortName": "list_aws_node_pools" + }, + "description": "Sample for ListAwsNodePools", + "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsNodePools_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", + "shortName": "AwsClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.list_aws_node_pools", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.ListAwsNodePools", + "service": { + "fullName": 
"google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "ListAwsNodePools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.ListAwsNodePoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gke_multicloud_v1.services.aws_clusters.pagers.ListAwsNodePoolsPager", + "shortName": "list_aws_node_pools" + }, + "description": "Sample for ListAwsNodePools", + "file": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_ListAwsNodePools_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_list_aws_node_pools_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", + "shortName": "AwsClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.rollback_aws_node_pool_update", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.RollbackAwsNodePoolUpdate", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "RollbackAwsNodePoolUpdate" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.gke_multicloud_v1.types.RollbackAwsNodePoolUpdateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "rollback_aws_node_pool_update" + }, + "description": "Sample for RollbackAwsNodePoolUpdate", + "file": "gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_RollbackAwsNodePoolUpdate_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", + "shortName": "AwsClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.rollback_aws_node_pool_update", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.RollbackAwsNodePoolUpdate", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "RollbackAwsNodePoolUpdate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.RollbackAwsNodePoolUpdateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "rollback_aws_node_pool_update" + }, + "description": "Sample for RollbackAwsNodePoolUpdate", + "file": "gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_RollbackAwsNodePoolUpdate_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_rollback_aws_node_pool_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", + "shortName": "AwsClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.update_aws_cluster", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsCluster", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "UpdateAwsCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsClusterRequest" + }, + { + "name": "aws_cluster", + "type": "google.cloud.gke_multicloud_v1.types.AwsCluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_aws_cluster" + }, + "description": "Sample for UpdateAwsCluster", + "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsCluster_async", + "segments": [ + { + "end": 68, + "start": 27, + "type": "FULL" + }, + { + "end": 68, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 65, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 69, + "start": 66, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", + "shortName": "AwsClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.update_aws_cluster", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsCluster", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "UpdateAwsCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsClusterRequest" + }, + { + "name": "aws_cluster", + "type": "google.cloud.gke_multicloud_v1.types.AwsCluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_aws_cluster" + }, + "description": "Sample for UpdateAwsCluster", + 
"file": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsCluster_sync", + "segments": [ + { + "end": 68, + "start": 27, + "type": "FULL" + }, + { + "end": 68, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 65, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 69, + "start": 66, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient", + "shortName": "AwsClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersAsyncClient.update_aws_node_pool", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsNodePool", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "UpdateAwsNodePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsNodePoolRequest" + }, + { + "name": "aws_node_pool", + "type": "google.cloud.gke_multicloud_v1.types.AwsNodePool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_aws_node_pool" + }, + "description": "Sample for UpdateAwsNodePool", + "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsNodePool_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient", + "shortName": "AwsClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AwsClustersClient.update_aws_node_pool", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters.UpdateAwsNodePool", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AwsClusters", + "shortName": "AwsClusters" + }, + "shortName": "UpdateAwsNodePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.UpdateAwsNodePoolRequest" + }, + { + "name": "aws_node_pool", + "type": "google.cloud.gke_multicloud_v1.types.AwsNodePool" + }, + { + "name": "update_mask", "type": "google.protobuf.field_mask_pb2.FieldMask" }, { @@ -3269,22 +4042,465 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_aws_node_pool" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_aws_node_pool" + }, + "description": "Sample for UpdateAwsNodePool", + "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsNodePool_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", + "shortName": "AzureClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.create_azure_client", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureClient", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", + "shortName": "AzureClusters" + }, + "shortName": "CreateAzureClient" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClientRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "azure_client", + "type": "google.cloud.gke_multicloud_v1.types.AzureClient" + }, + { + "name": "azure_client_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_azure_client" + }, + "description": "Sample for CreateAzureClient", + "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureClient_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", + "shortName": "AzureClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.create_azure_client", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureClient", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", + "shortName": "AzureClusters" + }, + "shortName": "CreateAzureClient" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClientRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "azure_client", + "type": "google.cloud.gke_multicloud_v1.types.AzureClient" + }, + { + "name": "azure_client_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_azure_client" + }, + "description": "Sample for CreateAzureClient", + "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureClient_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, 
+ "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", + "shortName": "AzureClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.create_azure_cluster", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureCluster", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", + "shortName": "AzureClusters" + }, + "shortName": "CreateAzureCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "azure_cluster", + "type": "google.cloud.gke_multicloud_v1.types.AzureCluster" + }, + { + "name": "azure_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_azure_cluster" + }, + "description": "Sample for CreateAzureCluster", + "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureCluster_async", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", + "shortName": "AzureClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.create_azure_cluster", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureCluster", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", + "shortName": "AzureClusters" + }, + "shortName": "CreateAzureCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "azure_cluster", + "type": "google.cloud.gke_multicloud_v1.types.AzureCluster" + }, + { + "name": "azure_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_azure_cluster" + }, + "description": "Sample for CreateAzureCluster", + "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureCluster_sync", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py" + }, + 
{ + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", + "shortName": "AzureClustersAsyncClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.create_azure_node_pool", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureNodePool", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", + "shortName": "AzureClusters" + }, + "shortName": "CreateAzureNodePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.CreateAzureNodePoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "azure_node_pool", + "type": "google.cloud.gke_multicloud_v1.types.AzureNodePool" + }, + { + "name": "azure_node_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_azure_node_pool" }, - "description": "Sample for UpdateAwsNodePool", - "file": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_sync.py", + "description": "Sample for CreateAzureNodePool", + "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AwsClusters_UpdateAwsNodePool_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureNodePool_async", "segments": [ { - "end": 64, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 64, + "end": 65, "start": 27, "type": "SHORT" }, @@ -3294,55 +4510,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 62, + "start": 56, "type": 
"REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_aws_clusters_update_aws_node_pool_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", - "shortName": "AzureClustersAsyncClient" + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", + "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.create_azure_client", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.create_azure_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureClient", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "CreateAzureClient" + "shortName": "CreateAzureNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClientRequest" + "type": "google.cloud.gke_multicloud_v1.types.CreateAzureNodePoolRequest" }, { "name": "parent", "type": "str" }, { - "name": "azure_client", - "type": "google.cloud.gke_multicloud_v1.types.AzureClient" + "name": "azure_node_pool", + "type": "google.cloud.gke_multicloud_v1.types.AzureNodePool" }, { - "name": "azure_client_id", + "name": "azure_node_pool_id", "type": "str" }, { @@ -3358,22 +4573,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_azure_client" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_azure_node_pool" }, - "description": "Sample for CreateAzureClient", - "file": 
"gkemulticloud_v1_generated_azure_clusters_create_azure_client_async.py", + "description": "Sample for CreateAzureNodePool", + "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureClient_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureNodePool_sync", "segments": [ { - "end": 61, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 65, "start": 27, "type": "SHORT" }, @@ -3383,54 +4598,127 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_sync.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", - "shortName": "AzureClustersClient" + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", + "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.create_azure_client", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.delete_azure_client", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureClient", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureClient", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "CreateAzureClient" + "shortName": "DeleteAzureClient" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClientRequest" + "type": 
"google.cloud.gke_multicloud_v1.types.DeleteAzureClientRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "azure_client", - "type": "google.cloud.gke_multicloud_v1.types.AzureClient" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { - "name": "azure_client_id", + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_azure_client" + }, + "description": "Sample for DeleteAzureClient", + "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureClient_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", + "shortName": "AzureClustersClient" + }, + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.delete_azure_client", + "method": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureClient", + "service": { + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", + "shortName": "AzureClusters" + }, + "shortName": "DeleteAzureClient" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClientRequest" + }, + { + "name": "name", "type": "str" }, { @@ -3447,21 +4735,21 @@ } ], 
"resultType": "google.api_core.operation.Operation", - "shortName": "create_azure_client" + "shortName": "delete_azure_client" }, - "description": "Sample for CreateAzureClient", - "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_sync.py", + "description": "Sample for DeleteAzureClient", + "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureClient_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureClient_sync", "segments": [ { - "end": 61, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3471,22 +4759,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_client_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_sync.py" }, { "canonical": true, @@ -3496,30 +4784,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.create_azure_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.delete_azure_cluster", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureCluster", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureCluster", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "CreateAzureCluster" + "shortName": "DeleteAzureCluster" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.gke_multicloud_v1.types.CreateAzureClusterRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "azure_cluster", - "type": "google.cloud.gke_multicloud_v1.types.AzureCluster" + "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClusterRequest" }, { - "name": "azure_cluster_id", + "name": "name", "type": "str" }, { @@ -3536,21 +4816,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_azure_cluster" + "shortName": "delete_azure_cluster" }, - "description": "Sample for CreateAzureCluster", - "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py", + "description": "Sample for DeleteAzureCluster", + "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureCluster_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureCluster_async", "segments": [ { - "end": 68, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 68, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3560,22 +4840,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 58, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 65, - "start": 59, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 69, - "start": 66, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_async.py" }, { "canonical": true, @@ -3584,30 +4864,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.create_azure_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.delete_azure_cluster", "method": { - "fullName": 
"google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureCluster", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureCluster", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "CreateAzureCluster" + "shortName": "DeleteAzureCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.CreateAzureClusterRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "azure_cluster", - "type": "google.cloud.gke_multicloud_v1.types.AzureCluster" + "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClusterRequest" }, { - "name": "azure_cluster_id", + "name": "name", "type": "str" }, { @@ -3624,21 +4896,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_azure_cluster" + "shortName": "delete_azure_cluster" }, - "description": "Sample for CreateAzureCluster", - "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py", + "description": "Sample for DeleteAzureCluster", + "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureCluster_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureCluster_sync", "segments": [ { - "end": 68, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 68, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3648,22 +4920,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 58, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 65, - "start": 59, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 69, - "start": 66, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_cluster_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_sync.py" }, { 
"canonical": true, @@ -3673,30 +4945,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.create_azure_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.delete_azure_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "CreateAzureNodePool" + "shortName": "DeleteAzureNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.CreateAzureNodePoolRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "azure_node_pool", - "type": "google.cloud.gke_multicloud_v1.types.AzureNodePool" + "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureNodePoolRequest" }, { - "name": "azure_node_pool_id", + "name": "name", "type": "str" }, { @@ -3713,21 +4977,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_azure_node_pool" + "shortName": "delete_azure_node_pool" }, - "description": "Sample for CreateAzureNodePool", - "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_async.py", + "description": "Sample for DeleteAzureNodePool", + "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureNodePool_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureNodePool_async", "segments": [ { - "end": 65, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3737,22 +5001,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + 
"end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_async.py" }, { "canonical": true, @@ -3761,30 +5025,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.create_azure_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.delete_azure_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.CreateAzureNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "CreateAzureNodePool" + "shortName": "DeleteAzureNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.CreateAzureNodePoolRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "azure_node_pool", - "type": "google.cloud.gke_multicloud_v1.types.AzureNodePool" + "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureNodePoolRequest" }, { - "name": "azure_node_pool_id", + "name": "name", "type": "str" }, { @@ -3801,21 +5057,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_azure_node_pool" + "shortName": "delete_azure_node_pool" }, - "description": "Sample for CreateAzureNodePool", - "file": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_sync.py", + "description": "Sample for DeleteAzureNodePool", + "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_CreateAzureNodePool_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureNodePool_sync", "segments": [ { - "end": 65, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3825,22 +5081,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_create_azure_node_pool_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_sync.py" }, { "canonical": true, @@ -3850,23 +5106,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.delete_azure_client", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.generate_azure_access_token", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureClient", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GenerateAzureAccessToken", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "DeleteAzureClient" + "shortName": "GenerateAzureAccessToken" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClientRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenRequest" }, { "name": "retry", @@ -3881,22 +5133,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_azure_client" + "resultType": 
"google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenResponse", + "shortName": "generate_azure_access_token" }, - "description": "Sample for DeleteAzureClient", - "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_async.py", + "description": "Sample for GenerateAzureAccessToken", + "file": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureClient_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GenerateAzureAccessToken_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3911,17 +5163,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_async.py" }, { "canonical": true, @@ -3930,23 +5182,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.delete_azure_client", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.generate_azure_access_token", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureClient", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GenerateAzureAccessToken", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "DeleteAzureClient" + "shortName": "GenerateAzureAccessToken" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClientRequest" - }, - { - "name": "name", - 
"type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenRequest" }, { "name": "retry", @@ -3961,22 +5209,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_azure_client" + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenResponse", + "shortName": "generate_azure_access_token" }, - "description": "Sample for DeleteAzureClient", - "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_sync.py", + "description": "Sample for GenerateAzureAccessToken", + "file": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureClient_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GenerateAzureAccessToken_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3991,17 +5239,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_client_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_sync.py" }, { "canonical": true, @@ -4011,23 +5259,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.delete_azure_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.generate_azure_cluster_agent_token", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureCluster", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GenerateAzureClusterAgentToken", "service": 
{ "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "DeleteAzureCluster" + "shortName": "GenerateAzureClusterAgentToken" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClusterRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenRequest" }, { "name": "retry", @@ -4042,22 +5286,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_azure_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenResponse", + "shortName": "generate_azure_cluster_agent_token" }, - "description": "Sample for DeleteAzureCluster", - "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_async.py", + "description": "Sample for GenerateAzureClusterAgentToken", + "file": "gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureCluster_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GenerateAzureClusterAgentToken_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4067,22 +5311,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, - { - "end": 56, - "start": 53, + { + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_async.py" }, { "canonical": true, @@ -4091,23 +5335,19 @@ "fullName": 
"google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.delete_azure_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.generate_azure_cluster_agent_token", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureCluster", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GenerateAzureClusterAgentToken", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "DeleteAzureCluster" + "shortName": "GenerateAzureClusterAgentToken" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureClusterRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenRequest" }, { "name": "retry", @@ -4122,22 +5362,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_azure_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenResponse", + "shortName": "generate_azure_cluster_agent_token" }, - "description": "Sample for DeleteAzureCluster", - "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_sync.py", + "description": "Sample for GenerateAzureClusterAgentToken", + "file": "gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureCluster_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GenerateAzureClusterAgentToken_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4147,22 +5387,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_cluster_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_generate_azure_cluster_agent_token_sync.py" }, { "canonical": true, @@ -4172,19 +5412,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.delete_azure_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_client", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureClient", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "DeleteAzureNodePool" + "shortName": "GetAzureClient" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureClientRequest" }, { "name": "name", @@ -4203,22 +5443,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_azure_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureClient", + "shortName": "get_azure_client" }, - "description": "Sample for DeleteAzureNodePool", - "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_async.py", + "description": "Sample for GetAzureClient", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureNodePool_async", + "regionTag": 
"gkemulticloud_v1_generated_AzureClusters_GetAzureClient_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4233,17 +5473,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_async.py" }, { "canonical": true, @@ -4252,19 +5492,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.delete_azure_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_client", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.DeleteAzureNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureClient", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "DeleteAzureNodePool" + "shortName": "GetAzureClient" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.DeleteAzureNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureClientRequest" }, { "name": "name", @@ -4283,22 +5523,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_azure_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureClient", + "shortName": "get_azure_client" }, - "description": "Sample for DeleteAzureNodePool", - "file": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_sync.py", + "description": "Sample for GetAzureClient", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_DeleteAzureNodePool_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureClient_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4313,17 +5553,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_delete_azure_node_pool_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_sync.py" }, { "canonical": true, @@ -4333,19 +5573,23 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.generate_azure_access_token", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_cluster", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GenerateAzureAccessToken", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureCluster", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GenerateAzureAccessToken" + "shortName": "GetAzureCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureClusterRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -4360,14 +5604,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenResponse", - "shortName": "generate_azure_access_token" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureCluster", + "shortName": 
"get_azure_cluster" }, - "description": "Sample for GenerateAzureAccessToken", - "file": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_async.py", + "description": "Sample for GetAzureCluster", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GenerateAzureAccessToken_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureCluster_async", "segments": [ { "end": 51, @@ -4400,7 +5644,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_async.py" }, { "canonical": true, @@ -4409,19 +5653,23 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.generate_azure_access_token", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_cluster", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GenerateAzureAccessToken", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureCluster", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GenerateAzureAccessToken" + "shortName": "GetAzureCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureClusterRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -4436,14 +5684,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.GenerateAzureAccessTokenResponse", - "shortName": "generate_azure_access_token" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureCluster", + 
"shortName": "get_azure_cluster" }, - "description": "Sample for GenerateAzureAccessToken", - "file": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_sync.py", + "description": "Sample for GetAzureCluster", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GenerateAzureAccessToken_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureCluster_sync", "segments": [ { "end": 51, @@ -4476,7 +5724,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_generate_azure_access_token_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_sync.py" }, { "canonical": true, @@ -4486,22 +5734,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_client", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_json_web_keys", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureClient", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureJsonWebKeys", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GetAzureClient" + "shortName": "GetAzureJsonWebKeys" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAzureClientRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureJsonWebKeysRequest" }, { - "name": "name", + "name": "azure_cluster", "type": "str" }, { @@ -4517,14 +5765,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AzureClient", - "shortName": "get_azure_client" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureJsonWebKeys", + "shortName": 
"get_azure_json_web_keys" }, - "description": "Sample for GetAzureClient", - "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_async.py", + "description": "Sample for GetAzureJsonWebKeys", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureClient_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureJsonWebKeys_async", "segments": [ { "end": 51, @@ -4557,7 +5805,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_async.py" }, { "canonical": true, @@ -4566,22 +5814,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_client", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_json_web_keys", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureClient", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureJsonWebKeys", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GetAzureClient" + "shortName": "GetAzureJsonWebKeys" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAzureClientRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureJsonWebKeysRequest" }, { - "name": "name", + "name": "azure_cluster", "type": "str" }, { @@ -4597,14 +5845,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AzureClient", - "shortName": "get_azure_client" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureJsonWebKeys", + "shortName": "get_azure_json_web_keys" }, - "description": "Sample for 
GetAzureClient", - "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_sync.py", + "description": "Sample for GetAzureJsonWebKeys", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureClient_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureJsonWebKeys_sync", "segments": [ { "end": 51, @@ -4637,7 +5885,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_client_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_json_web_keys_sync.py" }, { "canonical": true, @@ -4647,19 +5895,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureCluster", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GetAzureCluster" + "shortName": "GetAzureNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAzureClusterRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureNodePoolRequest" }, { "name": "name", @@ -4678,14 +5926,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AzureCluster", - "shortName": "get_azure_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureNodePool", + "shortName": "get_azure_node_pool" }, - "description": "Sample for GetAzureCluster", - "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_async.py", + 
"description": "Sample for GetAzureNodePool", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureCluster_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureNodePool_async", "segments": [ { "end": 51, @@ -4718,7 +5966,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_async.py" }, { "canonical": true, @@ -4727,19 +5975,19 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_cluster", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_node_pool", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureCluster", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureNodePool", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GetAzureCluster" + "shortName": "GetAzureNodePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAzureClusterRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureNodePoolRequest" }, { "name": "name", @@ -4758,14 +6006,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AzureCluster", - "shortName": "get_azure_cluster" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureNodePool", + "shortName": "get_azure_node_pool" }, - "description": "Sample for GetAzureCluster", - "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_sync.py", + "description": "Sample for GetAzureNodePool", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureCluster_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureNodePool_sync", "segments": [ { "end": 51, @@ -4798,7 +6046,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_cluster_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_sync.py" }, { "canonical": true, @@ -4808,22 +6056,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient", "shortName": "AzureClustersAsyncClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersAsyncClient.get_azure_open_id_config", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureOpenIdConfig", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GetAzureNodePool" + "shortName": "GetAzureOpenIdConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAzureNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureOpenIdConfigRequest" }, { - "name": "name", + "name": "azure_cluster", "type": "str" }, { @@ -4839,14 +6087,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AzureNodePool", - "shortName": "get_azure_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureOpenIdConfig", + "shortName": "get_azure_open_id_config" }, - "description": "Sample for GetAzureNodePool", - "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_async.py", + "description": "Sample for GetAzureOpenIdConfig", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_async.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureNodePool_async", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureOpenIdConfig_async", "segments": [ { "end": 51, @@ -4879,7 +6127,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_async.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_async.py" }, { "canonical": true, @@ -4888,22 +6136,22 @@ "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient", "shortName": "AzureClustersClient" }, - "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_node_pool", + "fullName": "google.cloud.gke_multicloud_v1.AzureClustersClient.get_azure_open_id_config", "method": { - "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureNodePool", + "fullName": "google.cloud.gkemulticloud.v1.AzureClusters.GetAzureOpenIdConfig", "service": { "fullName": "google.cloud.gkemulticloud.v1.AzureClusters", "shortName": "AzureClusters" }, - "shortName": "GetAzureNodePool" + "shortName": "GetAzureOpenIdConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.gke_multicloud_v1.types.GetAzureNodePoolRequest" + "type": "google.cloud.gke_multicloud_v1.types.GetAzureOpenIdConfigRequest" }, { - "name": "name", + "name": "azure_cluster", "type": "str" }, { @@ -4919,14 +6167,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.gke_multicloud_v1.types.AzureNodePool", - "shortName": "get_azure_node_pool" + "resultType": "google.cloud.gke_multicloud_v1.types.AzureOpenIdConfig", + "shortName": "get_azure_open_id_config" }, - "description": "Sample for GetAzureNodePool", - "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_sync.py", + "description": "Sample for GetAzureOpenIdConfig", + "file": "gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureNodePool_sync", + "regionTag": "gkemulticloud_v1_generated_AzureClusters_GetAzureOpenIdConfig_sync", "segments": [ { "end": 51, @@ -4959,7 +6207,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_node_pool_sync.py" + "title": "gkemulticloud_v1_generated_azure_clusters_get_azure_open_id_config_sync.py" }, { "canonical": true, @@ -5658,12 +6906,12 @@ "regionTag": "gkemulticloud_v1_generated_AzureClusters_UpdateAzureCluster_async", "segments": [ { - "end": 66, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 66, + "end": 65, "start": 27, "type": "SHORT" }, @@ -5673,18 +6921,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 56, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 63, - "start": 57, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 67, - "start": 64, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], @@ -5742,12 +6990,12 @@ "regionTag": "gkemulticloud_v1_generated_AzureClusters_UpdateAzureCluster_sync", "segments": [ { - "end": 66, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 66, + "end": 65, "start": 27, "type": "SHORT" }, @@ -5757,18 +7005,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 56, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 63, - "start": 57, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 67, - "start": 64, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-gke-multicloud/scripts/fixup_gke_multicloud_v1_keywords.py b/packages/google-cloud-gke-multicloud/scripts/fixup_gke_multicloud_v1_keywords.py index eae5c4975569..637aada10eaf 100644 --- a/packages/google-cloud-gke-multicloud/scripts/fixup_gke_multicloud_v1_keywords.py +++ b/packages/google-cloud-gke-multicloud/scripts/fixup_gke_multicloud_v1_keywords.py @@ -46,30 +46,38 @@ class 
gke_multicloudCallTransformer(cst.CSTTransformer): 'create_azure_cluster': ('parent', 'azure_cluster', 'azure_cluster_id', 'validate_only', ), 'create_azure_node_pool': ('parent', 'azure_node_pool', 'azure_node_pool_id', 'validate_only', ), 'delete_attached_cluster': ('name', 'validate_only', 'allow_missing', 'ignore_errors', 'etag', ), - 'delete_aws_cluster': ('name', 'validate_only', 'allow_missing', 'etag', ), - 'delete_aws_node_pool': ('name', 'validate_only', 'allow_missing', 'etag', ), + 'delete_aws_cluster': ('name', 'validate_only', 'allow_missing', 'ignore_errors', 'etag', ), + 'delete_aws_node_pool': ('name', 'validate_only', 'allow_missing', 'ignore_errors', 'etag', ), 'delete_azure_client': ('name', 'allow_missing', 'validate_only', ), 'delete_azure_cluster': ('name', 'allow_missing', 'validate_only', 'etag', ), 'delete_azure_node_pool': ('name', 'validate_only', 'allow_missing', 'etag', ), - 'generate_attached_cluster_install_manifest': ('parent', 'attached_cluster_id', 'platform_version', ), + 'generate_attached_cluster_agent_token': ('attached_cluster', 'subject_token', 'subject_token_type', 'version', 'grant_type', 'audience', 'scope', 'requested_token_type', 'options', ), + 'generate_attached_cluster_install_manifest': ('parent', 'attached_cluster_id', 'platform_version', 'proxy_config', ), 'generate_aws_access_token': ('aws_cluster', ), + 'generate_aws_cluster_agent_token': ('aws_cluster', 'subject_token', 'subject_token_type', 'version', 'node_pool_id', 'grant_type', 'audience', 'scope', 'requested_token_type', 'options', ), 'generate_azure_access_token': ('azure_cluster', ), + 'generate_azure_cluster_agent_token': ('azure_cluster', 'subject_token', 'subject_token_type', 'version', 'node_pool_id', 'grant_type', 'audience', 'scope', 'requested_token_type', 'options', ), 'get_attached_cluster': ('name', ), 'get_attached_server_config': ('name', ), 'get_aws_cluster': ('name', ), + 'get_aws_json_web_keys': ('aws_cluster', ), 'get_aws_node_pool': 
('name', ), + 'get_aws_open_id_config': ('aws_cluster', ), 'get_aws_server_config': ('name', ), 'get_azure_client': ('name', ), 'get_azure_cluster': ('name', ), + 'get_azure_json_web_keys': ('azure_cluster', ), 'get_azure_node_pool': ('name', ), + 'get_azure_open_id_config': ('azure_cluster', ), 'get_azure_server_config': ('name', ), - 'import_attached_cluster': ('parent', 'fleet_membership', 'platform_version', 'distribution', 'validate_only', ), + 'import_attached_cluster': ('parent', 'fleet_membership', 'platform_version', 'distribution', 'validate_only', 'proxy_config', ), 'list_attached_clusters': ('parent', 'page_size', 'page_token', ), 'list_aws_clusters': ('parent', 'page_size', 'page_token', ), 'list_aws_node_pools': ('parent', 'page_size', 'page_token', ), 'list_azure_clients': ('parent', 'page_size', 'page_token', ), 'list_azure_clusters': ('parent', 'page_size', 'page_token', ), 'list_azure_node_pools': ('parent', 'page_size', 'page_token', ), + 'rollback_aws_node_pool_update': ('name', 'respect_pdb', ), 'update_attached_cluster': ('attached_cluster', 'update_mask', 'validate_only', ), 'update_aws_cluster': ('aws_cluster', 'update_mask', 'validate_only', ), 'update_aws_node_pool': ('aws_node_pool', 'update_mask', 'validate_only', ), diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index 85ad56891e2b..674488dc1f8c 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -3005,6 +3005,179 @@ async def test_generate_attached_cluster_install_manifest_flattened_error_async( ) +@pytest.mark.parametrize( + "request_type", + [ + attached_service.GenerateAttachedClusterAgentTokenRequest, + dict, + ], +) +def 
test_generate_attached_cluster_agent_token(request_type, transport: str = "grpc"): + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_attached_cluster_agent_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = attached_service.GenerateAttachedClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + response = client.generate_attached_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == attached_service.GenerateAttachedClusterAgentTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance( + response, attached_service.GenerateAttachedClusterAgentTokenResponse + ) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +def test_generate_attached_cluster_agent_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_attached_cluster_agent_token), "__call__" + ) as call: + client.generate_attached_cluster_agent_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == attached_service.GenerateAttachedClusterAgentTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_attached_cluster_agent_token_async( + transport: str = "grpc_asyncio", + request_type=attached_service.GenerateAttachedClusterAgentTokenRequest, +): + client = AttachedClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_attached_cluster_agent_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attached_service.GenerateAttachedClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + ) + response = await client.generate_attached_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == attached_service.GenerateAttachedClusterAgentTokenRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, attached_service.GenerateAttachedClusterAgentTokenResponse + ) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +@pytest.mark.asyncio +async def test_generate_attached_cluster_agent_token_async_from_dict(): + await test_generate_attached_cluster_agent_token_async(request_type=dict) + + +def test_generate_attached_cluster_agent_token_field_headers(): + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = attached_service.GenerateAttachedClusterAgentTokenRequest() + + request.attached_cluster = "attached_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_attached_cluster_agent_token), "__call__" + ) as call: + call.return_value = attached_service.GenerateAttachedClusterAgentTokenResponse() + client.generate_attached_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "attached_cluster=attached_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_attached_cluster_agent_token_field_headers_async(): + client = AttachedClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = attached_service.GenerateAttachedClusterAgentTokenRequest() + + request.attached_cluster = "attached_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_attached_cluster_agent_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attached_service.GenerateAttachedClusterAgentTokenResponse() + ) + await client.generate_attached_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "attached_cluster=attached_cluster_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -3043,8 +3216,15 @@ def test_create_attached_cluster_rest(request_type): }, "logging_config": {"component_config": {"enable_components": [1]}}, "errors": [{"message": "message_value"}], - "authorization": {"admin_users": [{"username": "username_value"}]}, + "authorization": { + "admin_users": [{"username": "username_value"}], + "admin_groups": [{"group": "group_value"}], + }, "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "proxy_config": { + "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} + }, + "binary_authorization": {"evaluation_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -3443,8 +3623,15 @@ def test_update_attached_cluster_rest(request_type): }, "logging_config": {"component_config": {"enable_components": [1]}}, "errors": [{"message": "message_value"}], - "authorization": {"admin_users": [{"username": "username_value"}]}, + "authorization": { + "admin_users": [{"username": "username_value"}], + "admin_groups": [{"group": "group_value"}], + }, "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "proxy_config": { + "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} + }, + "binary_authorization": {"evaluation_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5377,6 +5564,7 @@ def test_generate_attached_cluster_install_manifest_rest_required_fields( ( "attached_cluster_id", "platform_version", + "proxy_config", ) ) jsonified_request.update(unset_fields) @@ -5458,6 +5646,7 @@ def test_generate_attached_cluster_install_manifest_rest_unset_required_fields() ( "attachedClusterId", "platformVersion", + "proxyConfig", ) ) & set( @@ -5629,6 +5818,270 @@ def test_generate_attached_cluster_install_manifest_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + attached_service.GenerateAttachedClusterAgentTokenRequest, + dict, + ], +) +def test_generate_attached_cluster_agent_token_rest(request_type): + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "attached_cluster": "projects/sample1/locations/sample2/attachedClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = attached_service.GenerateAttachedClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = attached_service.GenerateAttachedClusterAgentTokenResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_attached_cluster_agent_token(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, attached_service.GenerateAttachedClusterAgentTokenResponse + ) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +def test_generate_attached_cluster_agent_token_rest_required_fields( + request_type=attached_service.GenerateAttachedClusterAgentTokenRequest, +): + transport_class = transports.AttachedClustersRestTransport + + request_init = {} + request_init["attached_cluster"] = "" + request_init["subject_token"] = "" + request_init["subject_token_type"] = "" + request_init["version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_attached_cluster_agent_token._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # 
verify required fields with default values are now present + + jsonified_request["attachedCluster"] = "attached_cluster_value" + jsonified_request["subjectToken"] = "subject_token_value" + jsonified_request["subjectTokenType"] = "subject_token_type_value" + jsonified_request["version"] = "version_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_attached_cluster_agent_token._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "attachedCluster" in jsonified_request + assert jsonified_request["attachedCluster"] == "attached_cluster_value" + assert "subjectToken" in jsonified_request + assert jsonified_request["subjectToken"] == "subject_token_value" + assert "subjectTokenType" in jsonified_request + assert jsonified_request["subjectTokenType"] == "subject_token_type_value" + assert "version" in jsonified_request + assert jsonified_request["version"] == "version_value" + + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = attached_service.GenerateAttachedClusterAgentTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + attached_service.GenerateAttachedClusterAgentTokenResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.generate_attached_cluster_agent_token(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_attached_cluster_agent_token_rest_unset_required_fields(): + transport = transports.AttachedClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.generate_attached_cluster_agent_token._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "attachedCluster", + "subjectToken", + "subjectTokenType", + "version", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_attached_cluster_agent_token_rest_interceptors(null_interceptor): + transport = transports.AttachedClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AttachedClustersRestInterceptor(), + ) + client = AttachedClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_generate_attached_cluster_agent_token", + ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + 
"pre_generate_attached_cluster_agent_token", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attached_service.GenerateAttachedClusterAgentTokenRequest.pb( + attached_service.GenerateAttachedClusterAgentTokenRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + attached_service.GenerateAttachedClusterAgentTokenResponse.to_json( + attached_service.GenerateAttachedClusterAgentTokenResponse() + ) + ) + + request = attached_service.GenerateAttachedClusterAgentTokenRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attached_service.GenerateAttachedClusterAgentTokenResponse() + + client.generate_attached_cluster_agent_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_attached_cluster_agent_token_rest_bad_request( + transport: str = "rest", + request_type=attached_service.GenerateAttachedClusterAgentTokenRequest, +): + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "attached_cluster": "projects/sample1/locations/sample2/attachedClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_attached_cluster_agent_token(request) + + +def test_generate_attached_cluster_agent_token_rest_error(): + client = AttachedClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AttachedClustersGrpcTransport( @@ -5776,6 +6229,7 @@ def test_attached_clusters_base_transport(): "delete_attached_cluster", "get_attached_server_config", "generate_attached_cluster_install_manifest", + "generate_attached_cluster_agent_token", "get_operation", "cancel_operation", "delete_operation", @@ -6084,6 +6538,9 @@ def test_attached_clusters_client_transport_session_collision(transport_name): session1 = client1.transport.generate_attached_cluster_install_manifest._session session2 = client2.transport.generate_attached_cluster_install_manifest._session assert session1 != session2 + session1 = client1.transport.generate_attached_cluster_agent_token._session + session2 = client2.transport.generate_attached_cluster_agent_token._session + assert session1 != session2 def test_attached_clusters_grpc_transport_channel(): diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py index bd32b0dc2d40..5f03f5b0e2dc 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py @@ -2154,6 +2154,175 @@ async def 
test_delete_aws_cluster_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + aws_service.GenerateAwsClusterAgentTokenRequest, + dict, + ], +) +def test_generate_aws_cluster_agent_token(request_type, transport: str = "grpc"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_aws_cluster_agent_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = aws_service.GenerateAwsClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + response = client.generate_aws_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GenerateAwsClusterAgentTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, aws_service.GenerateAwsClusterAgentTokenResponse) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +def test_generate_aws_cluster_agent_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_aws_cluster_agent_token), "__call__" + ) as call: + client.generate_aws_cluster_agent_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GenerateAwsClusterAgentTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_aws_cluster_agent_token_async( + transport: str = "grpc_asyncio", + request_type=aws_service.GenerateAwsClusterAgentTokenRequest, +): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_aws_cluster_agent_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_service.GenerateAwsClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + ) + response = await client.generate_aws_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GenerateAwsClusterAgentTokenRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, aws_service.GenerateAwsClusterAgentTokenResponse) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +@pytest.mark.asyncio +async def test_generate_aws_cluster_agent_token_async_from_dict(): + await test_generate_aws_cluster_agent_token_async(request_type=dict) + + +def test_generate_aws_cluster_agent_token_field_headers(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GenerateAwsClusterAgentTokenRequest() + + request.aws_cluster = "aws_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_aws_cluster_agent_token), "__call__" + ) as call: + call.return_value = aws_service.GenerateAwsClusterAgentTokenResponse() + client.generate_aws_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "aws_cluster=aws_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_aws_cluster_agent_token_field_headers_async(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GenerateAwsClusterAgentTokenRequest() + + request.aws_cluster = "aws_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_aws_cluster_agent_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_service.GenerateAwsClusterAgentTokenResponse() + ) + await client.generate_aws_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "aws_cluster=aws_cluster_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2828,11 +2997,11 @@ async def test_update_aws_node_pool_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - aws_service.GetAwsNodePoolRequest, + aws_service.RollbackAwsNodePoolUpdateRequest, dict, ], ) -def test_get_aws_node_pool(request_type, transport: str = "grpc"): +def test_rollback_aws_node_pool_update(request_type, transport: str = "grpc"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2844,37 +3013,22 @@ def test_get_aws_node_pool(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_resources.AwsNodePool( - name="name_value", - version="version_value", - subnet_id="subnet_id_value", - state=aws_resources.AwsNodePool.State.PROVISIONING, - uid="uid_value", - reconciling=True, - etag="etag_value", - ) - response = client.get_aws_node_pool(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.rollback_aws_node_pool_update(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.GetAwsNodePoolRequest() + assert args[0] == aws_service.RollbackAwsNodePoolUpdateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, aws_resources.AwsNodePool) - assert response.name == "name_value" - assert response.version == "version_value" - assert response.subnet_id == "subnet_id_value" - assert response.state == aws_resources.AwsNodePool.State.PROVISIONING - assert response.uid == "uid_value" - assert response.reconciling is True - assert response.etag == "etag_value" + assert isinstance(response, future.Future) -def test_get_aws_node_pool_empty_call(): +def test_rollback_aws_node_pool_update_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AwsClustersClient( @@ -2884,17 +3038,18 @@ def test_get_aws_node_pool_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: - client.get_aws_node_pool() + client.rollback_aws_node_pool_update() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.GetAwsNodePoolRequest() + assert args[0] == aws_service.RollbackAwsNodePoolUpdateRequest() @pytest.mark.asyncio -async def test_get_aws_node_pool_async( - transport: str = "grpc_asyncio", request_type=aws_service.GetAwsNodePoolRequest +async def test_rollback_aws_node_pool_update_async( + transport: str = "grpc_asyncio", + request_type=aws_service.RollbackAwsNodePoolUpdateRequest, ): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2907,60 +3062,45 @@ async def test_get_aws_node_pool_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_resources.AwsNodePool( - name="name_value", - version="version_value", - subnet_id="subnet_id_value", - state=aws_resources.AwsNodePool.State.PROVISIONING, - uid="uid_value", - reconciling=True, - etag="etag_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_aws_node_pool(request) + response = await client.rollback_aws_node_pool_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.GetAwsNodePoolRequest() + assert args[0] == aws_service.RollbackAwsNodePoolUpdateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, aws_resources.AwsNodePool) - assert response.name == "name_value" - assert response.version == "version_value" - assert response.subnet_id == "subnet_id_value" - assert response.state == aws_resources.AwsNodePool.State.PROVISIONING - assert response.uid == "uid_value" - assert response.reconciling is True - assert response.etag == "etag_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_aws_node_pool_async_from_dict(): - await test_get_aws_node_pool_async(request_type=dict) +async def test_rollback_aws_node_pool_update_async_from_dict(): + await test_rollback_aws_node_pool_update_async(request_type=dict) -def test_get_aws_node_pool_field_headers(): +def test_rollback_aws_node_pool_update_field_headers(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = aws_service.GetAwsNodePoolRequest() + request = aws_service.RollbackAwsNodePoolUpdateRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: - call.return_value = aws_resources.AwsNodePool() - client.get_aws_node_pool(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.rollback_aws_node_pool_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2976,25 +3116,25 @@ def test_get_aws_node_pool_field_headers(): @pytest.mark.asyncio -async def test_get_aws_node_pool_field_headers_async(): +async def test_rollback_aws_node_pool_update_field_headers_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = aws_service.GetAwsNodePoolRequest() + request = aws_service.RollbackAwsNodePoolUpdateRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_resources.AwsNodePool() + operations_pb2.Operation(name="operations/op") ) - await client.get_aws_node_pool(request) + await client.rollback_aws_node_pool_update(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -3009,20 +3149,20 @@ async def test_get_aws_node_pool_field_headers_async(): ) in kw["metadata"] -def test_get_aws_node_pool_flattened(): +def test_rollback_aws_node_pool_update_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_resources.AwsNodePool() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_aws_node_pool( + client.rollback_aws_node_pool_update( name="name_value", ) @@ -3035,7 +3175,7 @@ def test_get_aws_node_pool_flattened(): assert arg == mock_val -def test_get_aws_node_pool_flattened_error(): +def test_rollback_aws_node_pool_update_flattened_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3043,31 +3183,31 @@ def test_get_aws_node_pool_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_aws_node_pool( - aws_service.GetAwsNodePoolRequest(), + client.rollback_aws_node_pool_update( + aws_service.RollbackAwsNodePoolUpdateRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_aws_node_pool_flattened_async(): +async def test_rollback_aws_node_pool_update_flattened_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_aws_node_pool), "__call__" + type(client.transport.rollback_aws_node_pool_update), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_resources.AwsNodePool() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_resources.AwsNodePool() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_aws_node_pool( + response = await client.rollback_aws_node_pool_update( name="name_value", ) @@ -3081,7 +3221,7 @@ async def test_get_aws_node_pool_flattened_async(): @pytest.mark.asyncio -async def test_get_aws_node_pool_flattened_error_async(): +async def test_rollback_aws_node_pool_update_flattened_error_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3089,8 +3229,8 @@ async def test_get_aws_node_pool_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_aws_node_pool( - aws_service.GetAwsNodePoolRequest(), + await client.rollback_aws_node_pool_update( + aws_service.RollbackAwsNodePoolUpdateRequest(), name="name_value", ) @@ -3098,11 +3238,11 @@ async def test_get_aws_node_pool_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - aws_service.ListAwsNodePoolsRequest, + aws_service.GetAwsNodePoolRequest, dict, ], ) -def test_list_aws_node_pools(request_type, transport: str = "grpc"): +def test_get_aws_node_pool(request_type, transport: str = "grpc"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3114,25 +3254,37 @@ def test_list_aws_node_pools(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_service.ListAwsNodePoolsResponse( - next_page_token="next_page_token_value", + call.return_value = aws_resources.AwsNodePool( + name="name_value", + version="version_value", + subnet_id="subnet_id_value", + state=aws_resources.AwsNodePool.State.PROVISIONING, + uid="uid_value", + reconciling=True, + etag="etag_value", ) - response = client.list_aws_node_pools(request) + response = client.get_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.ListAwsNodePoolsRequest() + assert args[0] == aws_service.GetAwsNodePoolRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAwsNodePoolsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, aws_resources.AwsNodePool) + assert response.name == "name_value" + assert response.version == "version_value" + assert response.subnet_id == "subnet_id_value" + assert response.state == aws_resources.AwsNodePool.State.PROVISIONING + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" -def test_list_aws_node_pools_empty_call(): +def test_get_aws_node_pool_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AwsClustersClient( @@ -3142,17 +3294,17 @@ def test_list_aws_node_pools_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: - client.list_aws_node_pools() + client.get_aws_node_pool() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.ListAwsNodePoolsRequest() + assert args[0] == aws_service.GetAwsNodePoolRequest() @pytest.mark.asyncio -async def test_list_aws_node_pools_async( - transport: str = "grpc_asyncio", request_type=aws_service.ListAwsNodePoolsRequest +async def test_get_aws_node_pool_async( + transport: str = "grpc_asyncio", request_type=aws_service.GetAwsNodePoolRequest ): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3165,48 +3317,60 @@ async def test_list_aws_node_pools_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_service.ListAwsNodePoolsResponse( - next_page_token="next_page_token_value", + aws_resources.AwsNodePool( + name="name_value", + version="version_value", + subnet_id="subnet_id_value", + state=aws_resources.AwsNodePool.State.PROVISIONING, + uid="uid_value", + reconciling=True, + etag="etag_value", ) ) - response = await client.list_aws_node_pools(request) + response = await client.get_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.ListAwsNodePoolsRequest() + assert args[0] == aws_service.GetAwsNodePoolRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAwsNodePoolsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, aws_resources.AwsNodePool) + assert response.name == "name_value" + assert response.version == "version_value" + assert response.subnet_id == "subnet_id_value" + assert response.state == aws_resources.AwsNodePool.State.PROVISIONING + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" @pytest.mark.asyncio -async def test_list_aws_node_pools_async_from_dict(): - await test_list_aws_node_pools_async(request_type=dict) +async def test_get_aws_node_pool_async_from_dict(): + await test_get_aws_node_pool_async(request_type=dict) -def test_list_aws_node_pools_field_headers(): +def test_get_aws_node_pool_field_headers(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = aws_service.ListAwsNodePoolsRequest() + request = aws_service.GetAwsNodePoolRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: - call.return_value = aws_service.ListAwsNodePoolsResponse() - client.list_aws_node_pools(request) + call.return_value = aws_resources.AwsNodePool() + client.get_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3217,30 +3381,30 @@ def test_list_aws_node_pools_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_aws_node_pools_field_headers_async(): +async def test_get_aws_node_pool_field_headers_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = aws_service.ListAwsNodePoolsRequest() + request = aws_service.GetAwsNodePoolRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_service.ListAwsNodePoolsResponse() + aws_resources.AwsNodePool() ) - await client.list_aws_node_pools(request) + await client.get_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -3251,37 +3415,37 @@ async def test_list_aws_node_pools_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_aws_node_pools_flattened(): +def test_get_aws_node_pool_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_service.ListAwsNodePoolsResponse() + call.return_value = aws_resources.AwsNodePool() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_aws_node_pools( - parent="parent_value", + client.get_aws_node_pool( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_aws_node_pools_flattened_error(): +def test_get_aws_node_pool_flattened_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3289,45 +3453,45 @@ def test_list_aws_node_pools_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_aws_node_pools( - aws_service.ListAwsNodePoolsRequest(), - parent="parent_value", + client.get_aws_node_pool( + aws_service.GetAwsNodePoolRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_aws_node_pools_flattened_async(): +async def test_get_aws_node_pool_flattened_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), "__call__" + type(client.transport.get_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_service.ListAwsNodePoolsResponse() + call.return_value = aws_resources.AwsNodePool() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_service.ListAwsNodePoolsResponse() + aws_resources.AwsNodePool() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_aws_node_pools( - parent="parent_value", + response = await client.get_aws_node_pool( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_aws_node_pools_flattened_error_async(): +async def test_get_aws_node_pool_flattened_error_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3335,316 +3499,124 @@ async def test_list_aws_node_pools_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_aws_node_pools( - aws_service.ListAwsNodePoolsRequest(), - parent="parent_value", + await client.get_aws_node_pool( + aws_service.GetAwsNodePoolRequest(), + name="name_value", ) -def test_list_aws_node_pools_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + aws_service.ListAwsNodePoolsRequest, + dict, + ], +) +def test_list_aws_node_pools(request_type, transport: str = "grpc"): client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_aws_node_pools), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - next_page_token="abc", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[], - next_page_token="def", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - ], - next_page_token="ghi", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = aws_service.ListAwsNodePoolsResponse( + next_page_token="next_page_token_value", ) - pager = client.list_aws_node_pools(request={}) + response = client.list_aws_node_pools(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.ListAwsNodePoolsRequest() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, aws_resources.AwsNodePool) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAwsNodePoolsPager) + assert response.next_page_token == "next_page_token_value" -def test_list_aws_node_pools_pages(transport_name: str = "grpc"): +def test_list_aws_node_pools_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_aws_node_pools), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - next_page_token="abc", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[], - next_page_token="def", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - ], - next_page_token="ghi", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - ), - RuntimeError, - ) - pages = list(client.list_aws_node_pools(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_aws_node_pools() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.ListAwsNodePoolsRequest() @pytest.mark.asyncio -async def test_list_aws_node_pools_async_pager(): +async def test_list_aws_node_pools_async( + transport: str = "grpc_asyncio", request_type=aws_service.ListAwsNodePoolsRequest +): client = AwsClustersAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_aws_node_pools), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_aws_node_pools), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - next_page_token="abc", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[], - next_page_token="def", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - ], - next_page_token="ghi", - ), + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_aws_node_pools( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, aws_resources.AwsNodePool) for i in responses) - - -@pytest.mark.asyncio -async def test_list_aws_node_pools_async_pages(): - client = AwsClustersAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_aws_node_pools), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - next_page_token="abc", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[], - next_page_token="def", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - ], - next_page_token="ghi", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_aws_node_pools(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - aws_service.DeleteAwsNodePoolRequest, - dict, - ], -) -def test_delete_aws_node_pool(request_type, transport: str = "grpc"): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_aws_node_pool(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.DeleteAwsNodePoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_aws_node_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" - ) as call: - client.delete_aws_node_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.DeleteAwsNodePoolRequest() - - -@pytest.mark.asyncio -async def test_delete_aws_node_pool_async( - transport: str = "grpc_asyncio", request_type=aws_service.DeleteAwsNodePoolRequest -): - client = AwsClustersAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + next_page_token="next_page_token_value", + ) ) - response = await client.delete_aws_node_pool(request) + response = await client.list_aws_node_pools(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.DeleteAwsNodePoolRequest() + assert args[0] == aws_service.ListAwsNodePoolsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListAwsNodePoolsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_delete_aws_node_pool_async_from_dict(): - await test_delete_aws_node_pool_async(request_type=dict) +async def test_list_aws_node_pools_async_from_dict(): + await test_list_aws_node_pools_async(request_type=dict) -def test_delete_aws_node_pool_field_headers(): +def test_list_aws_node_pools_field_headers(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = aws_service.DeleteAwsNodePoolRequest() + request = aws_service.ListAwsNodePoolsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" + type(client.transport.list_aws_node_pools), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_aws_node_pool(request) + call.return_value = aws_service.ListAwsNodePoolsResponse() + client.list_aws_node_pools(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3655,30 +3627,30 @@ def test_delete_aws_node_pool_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_aws_node_pool_field_headers_async(): +async def test_list_aws_node_pools_field_headers_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = aws_service.DeleteAwsNodePoolRequest() + request = aws_service.ListAwsNodePoolsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" + type(client.transport.list_aws_node_pools), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + aws_service.ListAwsNodePoolsResponse() ) - await client.delete_aws_node_pool(request) + await client.list_aws_node_pools(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3689,37 +3661,37 @@ async def test_delete_aws_node_pool_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_aws_node_pool_flattened(): +def test_list_aws_node_pools_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" + type(client.transport.list_aws_node_pools), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = aws_service.ListAwsNodePoolsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_aws_node_pool( - name="name_value", + client.list_aws_node_pools( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_aws_node_pool_flattened_error(): +def test_list_aws_node_pools_flattened_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3727,45 +3699,45 @@ def test_delete_aws_node_pool_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_aws_node_pool( - aws_service.DeleteAwsNodePoolRequest(), - name="name_value", + client.list_aws_node_pools( + aws_service.ListAwsNodePoolsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_aws_node_pool_flattened_async(): +async def test_list_aws_node_pools_flattened_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_aws_node_pool), "__call__" + type(client.transport.list_aws_node_pools), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = aws_service.ListAwsNodePoolsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + aws_service.ListAwsNodePoolsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_aws_node_pool( - name="name_value", + response = await client.list_aws_node_pools( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_aws_node_pool_flattened_error_async(): +async def test_list_aws_node_pools_flattened_error_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3773,52 +3745,245 @@ async def test_delete_aws_node_pool_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_aws_node_pool( - aws_service.DeleteAwsNodePoolRequest(), - name="name_value", + await client.list_aws_node_pools( + aws_service.ListAwsNodePoolsRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - aws_service.GetAwsServerConfigRequest, - dict, - ], -) -def test_get_aws_server_config(request_type, transport: str = "grpc"): +def test_list_aws_node_pools_pager(transport_name: str = "grpc"): client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.list_aws_node_pools), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = aws_resources.AwsServerConfig( - name="name_value", - supported_aws_regions=["supported_aws_regions_value"], - ) - response = client.get_aws_server_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.GetAwsServerConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, aws_resources.AwsServerConfig) - assert response.name == "name_value" - assert response.supported_aws_regions == ["supported_aws_regions_value"] + # Set the response to a series of pages. 
+ call.side_effect = ( + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + next_page_token="abc", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[], + next_page_token="def", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + ], + next_page_token="ghi", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_aws_node_pools(request={}) + assert pager._metadata == metadata -def test_get_aws_server_config_empty_call(): + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, aws_resources.AwsNodePool) for i in results) + + +def test_list_aws_node_pools_pages(transport_name: str = "grpc"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aws_node_pools), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + next_page_token="abc", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[], + next_page_token="def", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + ], + next_page_token="ghi", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + ), + RuntimeError, + ) + pages = list(client.list_aws_node_pools(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_aws_node_pools_async_pager(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aws_node_pools), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + next_page_token="abc", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[], + next_page_token="def", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + ], + next_page_token="ghi", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_aws_node_pools( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, aws_resources.AwsNodePool) for i in responses) + + +@pytest.mark.asyncio +async def test_list_aws_node_pools_async_pages(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_aws_node_pools), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + next_page_token="abc", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[], + next_page_token="def", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + ], + next_page_token="ghi", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_aws_node_pools(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.DeleteAwsNodePoolRequest, + dict, + ], +) +def test_delete_aws_node_pool(request_type, transport: str = "grpc"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_aws_node_pool), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_aws_node_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.DeleteAwsNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_aws_node_pool_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AwsClustersClient( @@ -3828,17 +3993,17 @@ def test_get_aws_server_config_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.delete_aws_node_pool), "__call__" ) as call: - client.get_aws_server_config() + client.delete_aws_node_pool() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.GetAwsServerConfigRequest() + assert args[0] == aws_service.DeleteAwsNodePoolRequest() @pytest.mark.asyncio -async def test_get_aws_server_config_async( - transport: str = "grpc_asyncio", request_type=aws_service.GetAwsServerConfigRequest +async def test_delete_aws_node_pool_async( + transport: str = "grpc_asyncio", request_type=aws_service.DeleteAwsNodePoolRequest ): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3851,50 +4016,45 @@ async def test_get_aws_server_config_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.delete_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_resources.AwsServerConfig( - name="name_value", - supported_aws_regions=["supported_aws_regions_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_aws_server_config(request) + response = await client.delete_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == aws_service.GetAwsServerConfigRequest() + assert args[0] == aws_service.DeleteAwsNodePoolRequest() # Establish that the response is the type that we expect. - assert isinstance(response, aws_resources.AwsServerConfig) - assert response.name == "name_value" - assert response.supported_aws_regions == ["supported_aws_regions_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_aws_server_config_async_from_dict(): - await test_get_aws_server_config_async(request_type=dict) +async def test_delete_aws_node_pool_async_from_dict(): + await test_delete_aws_node_pool_async(request_type=dict) -def test_get_aws_server_config_field_headers(): +def test_delete_aws_node_pool_field_headers(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = aws_service.GetAwsServerConfigRequest() + request = aws_service.DeleteAwsNodePoolRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.delete_aws_node_pool), "__call__" ) as call: - call.return_value = aws_resources.AwsServerConfig() - client.get_aws_server_config(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3910,25 +4070,25 @@ def test_get_aws_server_config_field_headers(): @pytest.mark.asyncio -async def test_get_aws_server_config_field_headers_async(): +async def test_delete_aws_node_pool_field_headers_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = aws_service.GetAwsServerConfigRequest() + request = aws_service.DeleteAwsNodePoolRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.delete_aws_node_pool), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_resources.AwsServerConfig() + operations_pb2.Operation(name="operations/op") ) - await client.get_aws_server_config(request) + await client.delete_aws_node_pool(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3943,20 +4103,20 @@ async def test_get_aws_server_config_field_headers_async(): ) in kw["metadata"] -def test_get_aws_server_config_flattened(): +def test_delete_aws_node_pool_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.delete_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = aws_resources.AwsServerConfig() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_aws_server_config( + client.delete_aws_node_pool( name="name_value", ) @@ -3969,7 +4129,7 @@ def test_get_aws_server_config_flattened(): assert arg == mock_val -def test_get_aws_server_config_flattened_error(): +def test_delete_aws_node_pool_flattened_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3977,31 +4137,31 @@ def test_get_aws_server_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_aws_server_config( - aws_service.GetAwsServerConfigRequest(), + client.delete_aws_node_pool( + aws_service.DeleteAwsNodePoolRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_aws_server_config_flattened_async(): +async def test_delete_aws_node_pool_flattened_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_aws_server_config), "__call__" + type(client.transport.delete_aws_node_pool), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = aws_resources.AwsServerConfig() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - aws_resources.AwsServerConfig() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_aws_server_config( + response = await client.delete_aws_node_pool( name="name_value", ) @@ -4015,7 +4175,7 @@ async def test_get_aws_server_config_flattened_async(): @pytest.mark.asyncio -async def test_get_aws_server_config_flattened_error_async(): +async def test_delete_aws_node_pool_flattened_error_async(): client = AwsClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4023,8 +4183,8 @@ async def test_get_aws_server_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_aws_server_config( - aws_service.GetAwsServerConfigRequest(), + await client.delete_aws_node_pool( + aws_service.DeleteAwsNodePoolRequest(), name="name_value", ) @@ -4032,150 +4192,2116 @@ async def test_get_aws_server_config_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - aws_service.CreateAwsClusterRequest, + aws_service.GetAwsOpenIdConfigRequest, dict, ], ) -def test_create_aws_cluster_rest(request_type): +def test_get_aws_open_id_config(request_type, transport: str = "grpc"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["aws_cluster"] = { - "name": "name_value", - "description": "description_value", - "networking": { - "vpc_id": "vpc_id_value", - "pod_address_cidr_blocks": [ - "pod_address_cidr_blocks_value1", - 
"pod_address_cidr_blocks_value2", - ], - "service_address_cidr_blocks": [ - "service_address_cidr_blocks_value1", - "service_address_cidr_blocks_value2", - ], - }, - "aws_region": "aws_region_value", - "control_plane": { - "version": "version_value", - "instance_type": "instance_type_value", - "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, - "subnet_ids": ["subnet_ids_value1", "subnet_ids_value2"], - "security_group_ids": [ - "security_group_ids_value1", - "security_group_ids_value2", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_open_id_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = aws_resources.AwsOpenIdConfig( + issuer="issuer_value", + jwks_uri="jwks_uri_value", + response_types_supported=["response_types_supported_value"], + subject_types_supported=["subject_types_supported_value"], + id_token_signing_alg_values_supported=[ + "id_token_signing_alg_values_supported_value" ], - "iam_instance_profile": "iam_instance_profile_value", - "root_volume": { - "size_gib": 844, - "volume_type": 1, - "iops": 443, - "kms_key_arn": "kms_key_arn_value", - }, - "main_volume": {}, - "database_encryption": {"kms_key_arn": "kms_key_arn_value"}, - "tags": {}, - "aws_services_authentication": { - "role_arn": "role_arn_value", - "role_session_name": "role_session_name_value", - }, - "proxy_config": { - "secret_arn": "secret_arn_value", - "secret_version": "secret_version_value", - }, - "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, - "instance_placement": {"tenancy": 1}, - }, - "authorization": {"admin_users": [{"username": "username_value"}]}, - "state": 1, - "endpoint": "endpoint_value", - "uid": "uid_value", - "reconciling": True, - 
"create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "workload_identity_config": { - "issuer_uri": "issuer_uri_value", - "workload_pool": "workload_pool_value", - "identity_provider": "identity_provider_value", - }, - "cluster_ca_certificate": "cluster_ca_certificate_value", - "fleet": {"project": "project_value", "membership": "membership_value"}, - "logging_config": {"component_config": {"enable_components": [1]}}, - "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + claims_supported=["claims_supported_value"], + grant_types=["grant_types_value"], + ) + response = client.get_aws_open_id_config(request) - # Determine if the message type is proto-plus or protobuf - test_field = aws_service.CreateAwsClusterRequest.meta.fields["aws_cluster"] + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsOpenIdConfigRequest() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the response is the type that we expect. 
+ assert isinstance(response, aws_resources.AwsOpenIdConfig) + assert response.issuer == "issuer_value" + assert response.jwks_uri == "jwks_uri_value" + assert response.response_types_supported == ["response_types_supported_value"] + assert response.subject_types_supported == ["subject_types_supported_value"] + assert response.id_token_signing_alg_values_supported == [ + "id_token_signing_alg_values_supported_value" + ] + assert response.claims_supported == ["claims_supported_value"] + assert response.grant_types == ["grant_types_value"] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields +def test_get_aws_open_id_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_aws_open_id_config), "__call__" + ) as call: + client.get_aws_open_id_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsOpenIdConfigRequest() + + +@pytest.mark.asyncio +async def test_get_aws_open_id_config_async( + transport: str = "grpc_asyncio", request_type=aws_service.GetAwsOpenIdConfigRequest +): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_open_id_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsOpenIdConfig( + issuer="issuer_value", + jwks_uri="jwks_uri_value", + response_types_supported=["response_types_supported_value"], + subject_types_supported=["subject_types_supported_value"], + id_token_signing_alg_values_supported=[ + "id_token_signing_alg_values_supported_value" + ], + claims_supported=["claims_supported_value"], + grant_types=["grant_types_value"], + ) + ) + response = await client.get_aws_open_id_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsOpenIdConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, aws_resources.AwsOpenIdConfig) + assert response.issuer == "issuer_value" + assert response.jwks_uri == "jwks_uri_value" + assert response.response_types_supported == ["response_types_supported_value"] + assert response.subject_types_supported == ["subject_types_supported_value"] + assert response.id_token_signing_alg_values_supported == [ + "id_token_signing_alg_values_supported_value" ] + assert response.claims_supported == ["claims_supported_value"] + assert response.grant_types == ["grant_types_value"] - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aws_cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +@pytest.mark.asyncio +async def test_get_aws_open_id_config_async_from_dict(): + await test_get_aws_open_id_config_async(request_type=dict) + + +def test_get_aws_open_id_config_field_headers(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GetAwsOpenIdConfigRequest() + + request.aws_cluster = "aws_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_open_id_config), "__call__" + ) as call: + call.return_value = aws_resources.AwsOpenIdConfig() + client.get_aws_open_id_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "aws_cluster=aws_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_aws_open_id_config_field_headers_async(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GetAwsOpenIdConfigRequest() + + request.aws_cluster = "aws_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_open_id_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsOpenIdConfig() + ) + await client.get_aws_open_id_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "aws_cluster=aws_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.GetAwsJsonWebKeysRequest, + dict, + ], +) +def test_get_aws_json_web_keys(request_type, transport: str = "grpc"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_aws_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = aws_resources.AwsJsonWebKeys() + response = client.get_aws_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsJsonWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, aws_resources.AwsJsonWebKeys) + + +def test_get_aws_json_web_keys_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_json_web_keys), "__call__" + ) as call: + client.get_aws_json_web_keys() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsJsonWebKeysRequest() + + +@pytest.mark.asyncio +async def test_get_aws_json_web_keys_async( + transport: str = "grpc_asyncio", request_type=aws_service.GetAwsJsonWebKeysRequest +): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsJsonWebKeys() + ) + response = await client.get_aws_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsJsonWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, aws_resources.AwsJsonWebKeys) + + +@pytest.mark.asyncio +async def test_get_aws_json_web_keys_async_from_dict(): + await test_get_aws_json_web_keys_async(request_type=dict) + + +def test_get_aws_json_web_keys_field_headers(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GetAwsJsonWebKeysRequest() + + request.aws_cluster = "aws_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_json_web_keys), "__call__" + ) as call: + call.return_value = aws_resources.AwsJsonWebKeys() + client.get_aws_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "aws_cluster=aws_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_aws_json_web_keys_field_headers_async(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = aws_service.GetAwsJsonWebKeysRequest() + + request.aws_cluster = "aws_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_json_web_keys), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsJsonWebKeys() + ) + await client.get_aws_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "aws_cluster=aws_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.GetAwsServerConfigRequest, + dict, + ], +) +def test_get_aws_server_config(request_type, transport: str = "grpc"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = aws_resources.AwsServerConfig( + name="name_value", + supported_aws_regions=["supported_aws_regions_value"], + ) + response = client.get_aws_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsServerConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, aws_resources.AwsServerConfig) + assert response.name == "name_value" + assert response.supported_aws_regions == ["supported_aws_regions_value"] + + +def test_get_aws_server_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + client.get_aws_server_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsServerConfigRequest() + + +@pytest.mark.asyncio +async def test_get_aws_server_config_async( + transport: str = "grpc_asyncio", request_type=aws_service.GetAwsServerConfigRequest +): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsServerConfig( + name="name_value", + supported_aws_regions=["supported_aws_regions_value"], + ) + ) + response = await client.get_aws_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == aws_service.GetAwsServerConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, aws_resources.AwsServerConfig) + assert response.name == "name_value" + assert response.supported_aws_regions == ["supported_aws_regions_value"] + + +@pytest.mark.asyncio +async def test_get_aws_server_config_async_from_dict(): + await test_get_aws_server_config_async(request_type=dict) + + +def test_get_aws_server_config_field_headers(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GetAwsServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + call.return_value = aws_resources.AwsServerConfig() + client.get_aws_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_aws_server_config_field_headers_async(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = aws_service.GetAwsServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsServerConfig() + ) + await client.get_aws_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_aws_server_config_flattened(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = aws_resources.AwsServerConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_aws_server_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_aws_server_config_flattened_error(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_aws_server_config( + aws_service.GetAwsServerConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_aws_server_config_flattened_async(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_aws_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = aws_resources.AwsServerConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + aws_resources.AwsServerConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_aws_server_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_aws_server_config_flattened_error_async(): + client = AwsClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_aws_server_config( + aws_service.GetAwsServerConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.CreateAwsClusterRequest, + dict, + ], +) +def test_create_aws_cluster_rest(request_type): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["aws_cluster"] = { + "name": "name_value", + "description": "description_value", + "networking": { + "vpc_id": "vpc_id_value", + "pod_address_cidr_blocks": [ + "pod_address_cidr_blocks_value1", + "pod_address_cidr_blocks_value2", + ], + "service_address_cidr_blocks": [ + "service_address_cidr_blocks_value1", + "service_address_cidr_blocks_value2", + ], + "per_node_pool_sg_rules_disabled": True, + }, + "aws_region": "aws_region_value", + "control_plane": { + "version": "version_value", + "instance_type": "instance_type_value", + "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, + "subnet_ids": ["subnet_ids_value1", "subnet_ids_value2"], + "security_group_ids": [ + "security_group_ids_value1", + "security_group_ids_value2", + ], + "iam_instance_profile": "iam_instance_profile_value", + "root_volume": { + "size_gib": 844, + "volume_type": 1, + "iops": 443, + "throughput": 1114, + "kms_key_arn": "kms_key_arn_value", + }, + "main_volume": {}, + "database_encryption": {"kms_key_arn": "kms_key_arn_value"}, + "tags": {}, + "aws_services_authentication": { + "role_arn": "role_arn_value", + "role_session_name": "role_session_name_value", + }, + "proxy_config": { + "secret_arn": "secret_arn_value", + "secret_version": "secret_version_value", + }, + "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, + "instance_placement": {"tenancy": 1}, + }, + "authorization": { + "admin_users": [{"username": "username_value"}], + "admin_groups": 
[{"group": "group_value"}], + }, + "state": 1, + "endpoint": "endpoint_value", + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "workload_identity_config": { + "issuer_uri": "issuer_uri_value", + "workload_pool": "workload_pool_value", + "identity_provider": "identity_provider_value", + }, + "cluster_ca_certificate": "cluster_ca_certificate_value", + "fleet": {"project": "project_value", "membership": "membership_value"}, + "logging_config": {"component_config": {"enable_components": [1]}}, + "errors": [{"message": "message_value"}], + "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "binary_authorization": {"evaluation_mode": 1}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = aws_service.CreateAwsClusterRequest.meta.fields["aws_cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["aws_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["aws_cluster"][field])): + del request_init["aws_cluster"][field][i][subfield] + else: + del 
request_init["aws_cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_aws_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_aws_cluster_rest_required_fields( + request_type=aws_service.CreateAwsClusterRequest, +): + transport_class = transports.AwsClustersRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["aws_cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "awsClusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_aws_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "awsClusterId" in jsonified_request + assert jsonified_request["awsClusterId"] == request_init["aws_cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["awsClusterId"] = "aws_cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_aws_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "aws_cluster_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "awsClusterId" in jsonified_request + assert jsonified_request["awsClusterId"] == "aws_cluster_id_value" + + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_aws_cluster(request) + + expected_params = [ + ( + "awsClusterId", + "", + ), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_aws_cluster_rest_unset_required_fields(): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.create_aws_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "awsClusterId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "awsCluster", + "awsClusterId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_aws_cluster_rest_interceptors(null_interceptor): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AwsClustersRestInterceptor(), + ) + client = AwsClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AwsClustersRestInterceptor, "post_create_aws_cluster" + ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "pre_create_aws_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = aws_service.CreateAwsClusterRequest.pb( + 
aws_service.CreateAwsClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = aws_service.CreateAwsClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_aws_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_aws_cluster_rest_bad_request( + transport: str = "rest", request_type=aws_service.CreateAwsClusterRequest +): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_aws_cluster(request) + + +def test_create_aws_cluster_rest_flattened(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + aws_cluster=aws_resources.AwsCluster(name="name_value"), + aws_cluster_id="aws_cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_aws_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/awsClusters" + % client.transport._host, + args[1], + ) + + +def test_create_aws_cluster_rest_flattened_error(transport: str = "rest"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_aws_cluster( + aws_service.CreateAwsClusterRequest(), + parent="parent_value", + aws_cluster=aws_resources.AwsCluster(name="name_value"), + aws_cluster_id="aws_cluster_id_value", + ) + + +def test_create_aws_cluster_rest_error(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.UpdateAwsClusterRequest, + dict, + ], +) +def test_update_aws_cluster_rest(request_type): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "aws_cluster": { + "name": "projects/sample1/locations/sample2/awsClusters/sample3" + } + } + request_init["aws_cluster"] = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3", + "description": "description_value", + "networking": { + "vpc_id": "vpc_id_value", + "pod_address_cidr_blocks": [ + "pod_address_cidr_blocks_value1", + "pod_address_cidr_blocks_value2", + ], + "service_address_cidr_blocks": [ + "service_address_cidr_blocks_value1", + "service_address_cidr_blocks_value2", + ], + "per_node_pool_sg_rules_disabled": True, + }, + "aws_region": "aws_region_value", + "control_plane": { + "version": "version_value", + "instance_type": "instance_type_value", + "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, + "subnet_ids": ["subnet_ids_value1", "subnet_ids_value2"], + "security_group_ids": [ + "security_group_ids_value1", + "security_group_ids_value2", + ], + "iam_instance_profile": "iam_instance_profile_value", + "root_volume": { + "size_gib": 844, + "volume_type": 1, + "iops": 443, + "throughput": 1114, + "kms_key_arn": "kms_key_arn_value", + }, + "main_volume": {}, + "database_encryption": {"kms_key_arn": "kms_key_arn_value"}, + "tags": {}, + "aws_services_authentication": { + "role_arn": "role_arn_value", + "role_session_name": 
"role_session_name_value", + }, + "proxy_config": { + "secret_arn": "secret_arn_value", + "secret_version": "secret_version_value", + }, + "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, + "instance_placement": {"tenancy": 1}, + }, + "authorization": { + "admin_users": [{"username": "username_value"}], + "admin_groups": [{"group": "group_value"}], + }, + "state": 1, + "endpoint": "endpoint_value", + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "workload_identity_config": { + "issuer_uri": "issuer_uri_value", + "workload_pool": "workload_pool_value", + "identity_provider": "identity_provider_value", + }, + "cluster_ca_certificate": "cluster_ca_certificate_value", + "fleet": {"project": "project_value", "membership": "membership_value"}, + "logging_config": {"component_config": {"enable_components": [1]}}, + "errors": [{"message": "message_value"}], + "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "binary_authorization": {"evaluation_mode": 1}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = aws_service.UpdateAwsClusterRequest.meta.fields["aws_cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["aws_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["aws_cluster"][field])): + del request_init["aws_cluster"][field][i][subfield] + else: + del 
request_init["aws_cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_aws_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_aws_cluster_rest_required_fields( + request_type=aws_service.UpdateAwsClusterRequest, +): + transport_class = transports.AwsClustersRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_aws_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_aws_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_aws_cluster(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_aws_cluster_rest_unset_required_fields(): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.update_aws_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "awsCluster", + "updateMask", + ) + ) + ) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_aws_cluster_rest_interceptors(null_interceptor): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AwsClustersRestInterceptor(), + ) + client = AwsClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AwsClustersRestInterceptor, "post_update_aws_cluster" + ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "pre_update_aws_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = aws_service.UpdateAwsClusterRequest.pb( + aws_service.UpdateAwsClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = aws_service.UpdateAwsClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_aws_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_aws_cluster_rest_bad_request( + transport: str = "rest", request_type=aws_service.UpdateAwsClusterRequest +): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "aws_cluster": { + "name": 
"projects/sample1/locations/sample2/awsClusters/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_aws_cluster(request) + + +def test_update_aws_cluster_rest_flattened(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "aws_cluster": { + "name": "projects/sample1/locations/sample2/awsClusters/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + aws_cluster=aws_resources.AwsCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_aws_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{aws_cluster.name=projects/*/locations/*/awsClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_aws_cluster_rest_flattened_error(transport: str = "rest"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_aws_cluster( + aws_service.UpdateAwsClusterRequest(), + aws_cluster=aws_resources.AwsCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_aws_cluster_rest_error(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.GetAwsClusterRequest, + dict, + ], +) +def test_get_aws_cluster_rest(request_type): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = aws_resources.AwsCluster( + name="name_value", + description="description_value", + aws_region="aws_region_value", + state=aws_resources.AwsCluster.State.PROVISIONING, + endpoint="endpoint_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + cluster_ca_certificate="cluster_ca_certificate_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_resources.AwsCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_aws_cluster(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, aws_resources.AwsCluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.aws_region == "aws_region_value" + assert response.state == aws_resources.AwsCluster.State.PROVISIONING + assert response.endpoint == "endpoint_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.cluster_ca_certificate == "cluster_ca_certificate_value" + + +def test_get_aws_cluster_rest_required_fields( + request_type=aws_service.GetAwsClusterRequest, +): + transport_class = transports.AwsClustersRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_aws_cluster._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_aws_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = aws_resources.AwsCluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = aws_resources.AwsCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_aws_cluster(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_aws_cluster_rest_unset_required_fields(): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_aws_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_aws_cluster_rest_interceptors(null_interceptor): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AwsClustersRestInterceptor(), + ) + client = AwsClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_get_aws_cluster" + ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "pre_get_aws_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = aws_service.GetAwsClusterRequest.pb( + aws_service.GetAwsClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() 
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = aws_resources.AwsCluster.to_json( + aws_resources.AwsCluster() + ) + + request = aws_service.GetAwsClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = aws_resources.AwsCluster() + + client.get_aws_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_aws_cluster_rest_bad_request( + transport: str = "rest", request_type=aws_service.GetAwsClusterRequest +): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_aws_cluster(request) + + +def test_get_aws_cluster_rest_flattened(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = aws_resources.AwsCluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_resources.AwsCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_aws_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/awsClusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_aws_cluster_rest_flattened_error(transport: str = "rest"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_aws_cluster( + aws_service.GetAwsClusterRequest(), + name="name_value", + ) + + +def test_get_aws_cluster_rest_error(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.ListAwsClustersRequest, + dict, + ], +) +def test_list_aws_clusters_rest(request_type): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = aws_service.ListAwsClustersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_service.ListAwsClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_aws_clusters(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAwsClustersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_aws_clusters_rest_required_fields( + request_type=aws_service.ListAwsClustersRequest, +): + transport_class = transports.AwsClustersRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_aws_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_aws_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = aws_service.ListAwsClustersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = aws_service.ListAwsClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_aws_clusters(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_aws_clusters_rest_unset_required_fields(): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_aws_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_aws_clusters_rest_interceptors(null_interceptor): + transport = transports.AwsClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AwsClustersRestInterceptor(), + ) + client = AwsClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.AwsClustersRestInterceptor, "post_list_aws_clusters" + ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "pre_list_aws_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = aws_service.ListAwsClustersRequest.pb( + aws_service.ListAwsClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = aws_service.ListAwsClustersResponse.to_json( + aws_service.ListAwsClustersResponse() + ) + + request = aws_service.ListAwsClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = aws_service.ListAwsClustersResponse() + + client.list_aws_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_aws_clusters_rest_bad_request( + transport: str = "rest", request_type=aws_service.ListAwsClustersRequest +): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_aws_clusters(request) + + +def test_list_aws_clusters_rest_flattened(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = aws_service.ListAwsClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_service.ListAwsClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_aws_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/awsClusters" + % client.transport._host, + args[1], + ) + + +def test_list_aws_clusters_rest_flattened_error(transport: str = "rest"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_aws_clusters( + aws_service.ListAwsClustersRequest(), + parent="parent_value", + ) + + +def test_list_aws_clusters_rest_pager(transport: str = "rest"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + aws_service.ListAwsClustersResponse( + aws_clusters=[ + aws_resources.AwsCluster(), + aws_resources.AwsCluster(), + aws_resources.AwsCluster(), + ], + next_page_token="abc", + ), + aws_service.ListAwsClustersResponse( + aws_clusters=[], + next_page_token="def", + ), + aws_service.ListAwsClustersResponse( + aws_clusters=[ + aws_resources.AwsCluster(), + ], + next_page_token="ghi", + ), + aws_service.ListAwsClustersResponse( + aws_clusters=[ + aws_resources.AwsCluster(), + aws_resources.AwsCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + aws_service.ListAwsClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_aws_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, aws_resources.AwsCluster) for i in results) + + pages = list(client.list_aws_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aws_cluster"][field])): - del request_init["aws_cluster"][field][i][subfield] - else: - del request_init["aws_cluster"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + aws_service.DeleteAwsClusterRequest, + dict, + ], +) +def test_delete_aws_cluster_rest(request_type): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -4190,20 +6316,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_aws_cluster(request) + response = client.delete_aws_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_aws_cluster_rest_required_fields( - request_type=aws_service.CreateAwsClusterRequest, +def test_delete_aws_cluster_rest_required_fields( + request_type=aws_service.DeleteAwsClusterRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["parent"] = "" - request_init["aws_cluster_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4215,37 +6340,33 @@ def test_create_aws_cluster_rest_required_fields( ) # verify fields with default values are dropped - assert "awsClusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_aws_cluster._get_unset_required_fields(jsonified_request) + ).delete_aws_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "awsClusterId" in jsonified_request - assert jsonified_request["awsClusterId"] == request_init["aws_cluster_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["awsClusterId"] = "aws_cluster_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_aws_cluster._get_unset_required_fields(jsonified_request) + ).delete_aws_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "aws_cluster_id", + "allow_missing", + "etag", + "ignore_errors", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "awsClusterId" in jsonified_request - assert jsonified_request["awsClusterId"] == "aws_cluster_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4266,10 +6387,9 @@ def test_create_aws_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4279,43 +6399,34 @@ def test_create_aws_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_aws_cluster(request) + response = client.delete_aws_cluster(request) - expected_params = [ - ( - "awsClusterId", - "", - ), - ] + expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_aws_cluster_rest_unset_required_fields(): +def test_delete_aws_cluster_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_aws_cluster._get_unset_required_fields({}) + unset_fields = transport.delete_aws_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "awsClusterId", + "allowMissing", + "etag", + "ignoreErrors", "validateOnly", ) ) - & set( - ( - "parent", - "awsCluster", - "awsClusterId", - ) - ) + & set(("name",)) ) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_aws_cluster_rest_interceptors(null_interceptor): +def test_delete_aws_cluster_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4330,14 +6441,14 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AwsClustersRestInterceptor, "post_create_aws_cluster" + transports.AwsClustersRestInterceptor, "post_delete_aws_cluster" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_create_aws_cluster" + transports.AwsClustersRestInterceptor, "pre_delete_aws_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.CreateAwsClusterRequest.pb( - aws_service.CreateAwsClusterRequest() + pb_message = aws_service.DeleteAwsClusterRequest.pb( + aws_service.DeleteAwsClusterRequest() ) transcode.return_value = { "method": "post", @@ -4353,7 +6464,7 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = aws_service.CreateAwsClusterRequest() + request = aws_service.DeleteAwsClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4361,7 +6472,7 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_aws_cluster( + client.delete_aws_cluster( request, metadata=[ ("key", "val"), @@ -4373,8 +6484,8 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_aws_cluster_rest_bad_request( - transport: str = "rest", request_type=aws_service.CreateAwsClusterRequest +def test_delete_aws_cluster_rest_bad_request( + transport: str = "rest", request_type=aws_service.DeleteAwsClusterRequest ): 
client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4382,7 +6493,7 @@ def test_create_aws_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4394,10 +6505,10 @@ def test_create_aws_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_aws_cluster(request) + client.delete_aws_cluster(request) -def test_create_aws_cluster_rest_flattened(): +def test_delete_aws_cluster_rest_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4408,237 +6519,114 @@ def test_create_aws_cluster_rest_flattened(): # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - aws_cluster=aws_resources.AwsCluster(name="name_value"), - aws_cluster_id="aws_cluster_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_aws_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/awsClusters" - % client.transport._host, - args[1], - ) - - -def test_create_aws_cluster_rest_flattened_error(transport: str = "rest"): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_aws_cluster( - aws_service.CreateAwsClusterRequest(), - parent="parent_value", - aws_cluster=aws_resources.AwsCluster(name="name_value"), - aws_cluster_id="aws_cluster_id_value", - ) - - -def test_create_aws_cluster_rest_error(): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - aws_service.UpdateAwsClusterRequest, - dict, - ], -) -def test_update_aws_cluster_rest(request_type): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "aws_cluster": { - "name": "projects/sample1/locations/sample2/awsClusters/sample3" - } - } - request_init["aws_cluster"] = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3", - "description": "description_value", - "networking": { - "vpc_id": "vpc_id_value", - "pod_address_cidr_blocks": [ - "pod_address_cidr_blocks_value1", - "pod_address_cidr_blocks_value2", - ], - "service_address_cidr_blocks": [ - "service_address_cidr_blocks_value1", - "service_address_cidr_blocks_value2", - ], - }, - "aws_region": "aws_region_value", - "control_plane": { - "version": "version_value", - "instance_type": "instance_type_value", - "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, - "subnet_ids": ["subnet_ids_value1", "subnet_ids_value2"], - "security_group_ids": [ - 
"security_group_ids_value1", - "security_group_ids_value2", - ], - "iam_instance_profile": "iam_instance_profile_value", - "root_volume": { - "size_gib": 844, - "volume_type": 1, - "iops": 443, - "kms_key_arn": "kms_key_arn_value", - }, - "main_volume": {}, - "database_encryption": {"kms_key_arn": "kms_key_arn_value"}, - "tags": {}, - "aws_services_authentication": { - "role_arn": "role_arn_value", - "role_session_name": "role_session_name_value", - }, - "proxy_config": { - "secret_arn": "secret_arn_value", - "secret_version": "secret_version_value", - }, - "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, - "instance_placement": {"tenancy": 1}, - }, - "authorization": {"admin_users": [{"username": "username_value"}]}, - "state": 1, - "endpoint": "endpoint_value", - "uid": "uid_value", - "reconciling": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "workload_identity_config": { - "issuer_uri": "issuer_uri_value", - "workload_pool": "workload_pool_value", - "identity_provider": "identity_provider_value", - }, - "cluster_ca_certificate": "cluster_ca_certificate_value", - "fleet": {"project": "project_value", "membership": "membership_value"}, - "logging_config": {"component_config": {"enable_components": [1]}}, - "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3" + } - # Determine if the message type is proto-plus or protobuf - test_field = aws_service.UpdateAwsClusterRequest.meta.fields["aws_cluster"] + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + client.delete_aws_cluster(**mock_args) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/awsClusters/*}" + % client.transport._host, + args[1], + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_delete_aws_cluster_rest_flattened_error(transport: str = "rest"): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aws_cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_aws_cluster( + aws_service.DeleteAwsClusterRequest(), + name="name_value", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aws_cluster"][field])): - del request_init["aws_cluster"][field][i][subfield] - else: - del request_init["aws_cluster"][field][subfield] +def test_delete_aws_cluster_rest_error(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.GenerateAwsClusterAgentTokenRequest, + dict, + ], +) +def test_generate_aws_cluster_agent_token_rest(request_type): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_service.GenerateAwsClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_service.GenerateAwsClusterAgentTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_aws_cluster(request) + response = client.generate_aws_cluster_agent_token(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, aws_service.GenerateAwsClusterAgentTokenResponse) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" -def test_update_aws_cluster_rest_required_fields( - request_type=aws_service.UpdateAwsClusterRequest, +def test_generate_aws_cluster_agent_token_rest_required_fields( + request_type=aws_service.GenerateAwsClusterAgentTokenRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} + request_init["aws_cluster"] = "" + request_init["subject_token"] = "" + request_init["subject_token_type"] = "" + request_init["version"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4653,24 +6641,30 @@ def test_update_aws_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aws_cluster._get_unset_required_fields(jsonified_request) + ).generate_aws_cluster_agent_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with 
default values are now present + jsonified_request["awsCluster"] = "aws_cluster_value" + jsonified_request["subjectToken"] = "subject_token_value" + jsonified_request["subjectTokenType"] = "subject_token_type_value" + jsonified_request["version"] = "version_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aws_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "update_mask", - "validate_only", - ) - ) + ).generate_aws_cluster_agent_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "awsCluster" in jsonified_request + assert jsonified_request["awsCluster"] == "aws_cluster_value" + assert "subjectToken" in jsonified_request + assert jsonified_request["subjectToken"] == "subject_token_value" + assert "subjectTokenType" in jsonified_request + assert jsonified_request["subjectTokenType"] == "subject_token_type_value" + assert "version" in jsonified_request + assert jsonified_request["version"] == "version_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4679,7 +6673,7 @@ def test_update_aws_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_service.GenerateAwsClusterAgentTokenResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4691,7 +6685,7 @@ def test_update_aws_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4699,42 +6693,46 @@ def test_update_aws_cluster_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = aws_service.GenerateAwsClusterAgentTokenResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_aws_cluster(request) + response = client.generate_aws_cluster_agent_token(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_aws_cluster_rest_unset_required_fields(): +def test_generate_aws_cluster_agent_token_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_aws_cluster._get_unset_required_fields({}) + unset_fields = ( + transport.generate_aws_cluster_agent_token._get_unset_required_fields({}) + ) assert set(unset_fields) == ( - set( - ( - "updateMask", - "validateOnly", - ) - ) + set(()) & set( ( "awsCluster", - "updateMask", + "subjectToken", + "subjectTokenType", + "version", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_aws_cluster_rest_interceptors(null_interceptor): +def test_generate_aws_cluster_agent_token_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4747,16 +6745,14 @@ def 
test_update_aws_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AwsClustersRestInterceptor, "post_update_aws_cluster" + transports.AwsClustersRestInterceptor, "post_generate_aws_cluster_agent_token" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_update_aws_cluster" + transports.AwsClustersRestInterceptor, "pre_generate_aws_cluster_agent_token" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.UpdateAwsClusterRequest.pb( - aws_service.UpdateAwsClusterRequest() + pb_message = aws_service.GenerateAwsClusterAgentTokenRequest.pb( + aws_service.GenerateAwsClusterAgentTokenRequest() ) transcode.return_value = { "method": "post", @@ -4768,19 +6764,21 @@ def test_update_aws_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + aws_service.GenerateAwsClusterAgentTokenResponse.to_json( + aws_service.GenerateAwsClusterAgentTokenResponse() + ) ) - request = aws_service.UpdateAwsClusterRequest() + request = aws_service.GenerateAwsClusterAgentTokenRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = aws_service.GenerateAwsClusterAgentTokenResponse() - client.update_aws_cluster( + client.generate_aws_cluster_agent_token( request, metadata=[ ("key", "val"), @@ -4792,8 +6790,9 @@ def test_update_aws_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_aws_cluster_rest_bad_request( - transport: str = "rest", request_type=aws_service.UpdateAwsClusterRequest +def 
test_generate_aws_cluster_agent_token_rest_bad_request( + transport: str = "rest", + request_type=aws_service.GenerateAwsClusterAgentTokenRequest, ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4802,9 +6801,7 @@ def test_update_aws_cluster_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "aws_cluster": { - "name": "projects/sample1/locations/sample2/awsClusters/sample3" - } + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" } request = request_type(**request_init) @@ -4817,71 +6814,10 @@ def test_update_aws_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_aws_cluster(request) - - -def test_update_aws_cluster_rest_flattened(): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "aws_cluster": { - "name": "projects/sample1/locations/sample2/awsClusters/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - aws_cluster=aws_resources.AwsCluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_aws_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{aws_cluster.name=projects/*/locations/*/awsClusters/*}" - % client.transport._host, - args[1], - ) - - -def test_update_aws_cluster_rest_flattened_error(transport: str = "rest"): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_aws_cluster( - aws_service.UpdateAwsClusterRequest(), - aws_cluster=aws_resources.AwsCluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + client.generate_aws_cluster_agent_token(request) -def test_update_aws_cluster_rest_error(): +def test_generate_aws_cluster_agent_token_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4890,66 +6826,52 @@ def test_update_aws_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - aws_service.GetAwsClusterRequest, + aws_service.GenerateAwsAccessTokenRequest, dict, ], ) -def test_get_aws_cluster_rest(request_type): +def test_generate_aws_access_token_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = aws_resources.AwsCluster( - name="name_value", - description="description_value", - aws_region="aws_region_value", - state=aws_resources.AwsCluster.State.PROVISIONING, - endpoint="endpoint_value", - uid="uid_value", - reconciling=True, - etag="etag_value", - cluster_ca_certificate="cluster_ca_certificate_value", + request_init = { + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = aws_service.GenerateAwsAccessTokenResponse( + access_token="access_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = aws_resources.AwsCluster.pb(return_value) + return_value = aws_service.GenerateAwsAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_aws_cluster(request) + response = client.generate_aws_access_token(request) # Establish that the response is the type that we expect. - assert isinstance(response, aws_resources.AwsCluster) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.aws_region == "aws_region_value" - assert response.state == aws_resources.AwsCluster.State.PROVISIONING - assert response.endpoint == "endpoint_value" - assert response.uid == "uid_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.cluster_ca_certificate == "cluster_ca_certificate_value" + assert isinstance(response, aws_service.GenerateAwsAccessTokenResponse) + assert response.access_token == "access_token_value" -def test_get_aws_cluster_rest_required_fields( - request_type=aws_service.GetAwsClusterRequest, +def test_generate_aws_access_token_rest_required_fields( + request_type=aws_service.GenerateAwsAccessTokenRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["name"] = "" + request_init["aws_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4964,21 +6886,21 @@ def test_get_aws_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).get_aws_cluster._get_unset_required_fields(jsonified_request) + ).generate_aws_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["awsCluster"] = "aws_cluster_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aws_cluster._get_unset_required_fields(jsonified_request) + ).generate_aws_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "awsCluster" in jsonified_request + assert jsonified_request["awsCluster"] == "aws_cluster_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4987,7 +6909,7 @@ def test_get_aws_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = aws_resources.AwsCluster() + return_value = aws_service.GenerateAwsAccessTokenResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5008,30 +6930,30 @@ def test_get_aws_cluster_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = aws_resources.AwsCluster.pb(return_value) + return_value = aws_service.GenerateAwsAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_aws_cluster(request) + response = client.generate_aws_access_token(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_aws_cluster_rest_unset_required_fields(): +def test_generate_aws_access_token_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_aws_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.generate_aws_access_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("awsCluster",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_aws_cluster_rest_interceptors(null_interceptor): +def test_generate_aws_access_token_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5044,14 +6966,14 @@ def test_get_aws_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AwsClustersRestInterceptor, "post_get_aws_cluster" + transports.AwsClustersRestInterceptor, "post_generate_aws_access_token" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_get_aws_cluster" + 
transports.AwsClustersRestInterceptor, "pre_generate_aws_access_token" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.GetAwsClusterRequest.pb( - aws_service.GetAwsClusterRequest() + pb_message = aws_service.GenerateAwsAccessTokenRequest.pb( + aws_service.GenerateAwsAccessTokenRequest() ) transcode.return_value = { "method": "post", @@ -5063,19 +6985,19 @@ def test_get_aws_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = aws_resources.AwsCluster.to_json( - aws_resources.AwsCluster() + req.return_value._content = aws_service.GenerateAwsAccessTokenResponse.to_json( + aws_service.GenerateAwsAccessTokenResponse() ) - request = aws_service.GetAwsClusterRequest() + request = aws_service.GenerateAwsAccessTokenRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = aws_resources.AwsCluster() + post.return_value = aws_service.GenerateAwsAccessTokenResponse() - client.get_aws_cluster( + client.generate_aws_access_token( request, metadata=[ ("key", "val"), @@ -5087,8 +7009,8 @@ def test_get_aws_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_aws_cluster_rest_bad_request( - transport: str = "rest", request_type=aws_service.GetAwsClusterRequest +def test_generate_aws_access_token_rest_bad_request( + transport: str = "rest", request_type=aws_service.GenerateAwsAccessTokenRequest ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5096,7 +7018,9 @@ def test_get_aws_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} + request_init = { + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" + } request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -5108,69 +7032,10 @@ def test_get_aws_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_aws_cluster(request) - - -def test_get_aws_cluster_rest_flattened(): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = aws_resources.AwsCluster() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_resources.AwsCluster.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_aws_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/awsClusters/*}" - % client.transport._host, - args[1], - ) - - -def test_get_aws_cluster_rest_flattened_error(transport: str = "rest"): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_aws_cluster( - aws_service.GetAwsClusterRequest(), - name="name_value", - ) + client.generate_aws_access_token(request) -def test_get_aws_cluster_rest_error(): +def test_generate_aws_access_token_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5179,50 +7044,165 @@ def test_get_aws_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - aws_service.ListAwsClustersRequest, + aws_service.CreateAwsNodePoolRequest, dict, ], ) -def test_list_aws_clusters_rest(request_type): +def test_create_aws_node_pool_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} + request_init["aws_node_pool"] = { + "name": "name_value", + "version": "version_value", + "config": { + "instance_type": "instance_type_value", + "root_volume": { + "size_gib": 844, + "volume_type": 1, + "iops": 443, + "throughput": 1114, + "kms_key_arn": "kms_key_arn_value", + }, + "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], + "labels": {}, + "tags": {}, + "iam_instance_profile": "iam_instance_profile_value", + "image_type": "image_type_value", + "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, + "security_group_ids": [ + "security_group_ids_value1", + "security_group_ids_value2", + ], + "proxy_config": { + "secret_arn": "secret_arn_value", + "secret_version": "secret_version_value", + }, + "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, + "instance_placement": {"tenancy": 1}, + "autoscaling_metrics_collection": { + "granularity": "granularity_value", + "metrics": ["metrics_value1", "metrics_value2"], + }, + "spot_config": { + "instance_types": ["instance_types_value1", 
"instance_types_value2"] + }, + }, + "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, + "subnet_id": "subnet_id_value", + "state": 1, + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "max_pods_constraint": {"max_pods_per_node": 1798}, + "errors": [{"message": "message_value"}], + "management": {"auto_repair": True}, + "update_settings": { + "surge_settings": {"max_surge": 971, "max_unavailable": 1577} + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = aws_service.CreateAwsNodePoolRequest.meta.fields["aws_node_pool"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["aws_node_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["aws_node_pool"][field])): + del request_init["aws_node_pool"][field][i][subfield] + else: + del 
request_init["aws_node_pool"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = aws_service.ListAwsClustersResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_service.ListAwsClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_aws_clusters(request) + response = client.create_aws_node_pool(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAwsClustersPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_aws_clusters_rest_required_fields( - request_type=aws_service.ListAwsClustersRequest, +def test_create_aws_node_pool_rest_required_fields( + request_type=aws_service.CreateAwsNodePoolRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} request_init["parent"] = "" + request_init["aws_node_pool_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5234,24 +7214,28 @@ def test_list_aws_clusters_rest_required_fields( ) # verify fields with default values are dropped + assert "awsNodePoolId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aws_clusters._get_unset_required_fields(jsonified_request) + 
).create_aws_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "awsNodePoolId" in jsonified_request + assert jsonified_request["awsNodePoolId"] == request_init["aws_node_pool_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["awsNodePoolId"] = "aws_node_pool_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aws_clusters._get_unset_required_fields(jsonified_request) + ).create_aws_node_pool._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", + "aws_node_pool_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -5259,6 +7243,8 @@ def test_list_aws_clusters_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "awsNodePoolId" in jsonified_request + assert jsonified_request["awsNodePoolId"] == "aws_node_pool_id_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5267,7 +7253,7 @@ def test_list_aws_clusters_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = aws_service.ListAwsClustersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5279,47 +7265,56 @@ def test_list_aws_clusters_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = aws_service.ListAwsClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_aws_clusters(request) + response = client.create_aws_node_pool(request) - expected_params = [] + expected_params = [ + ( + "awsNodePoolId", + "", + ), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_aws_clusters_rest_unset_required_fields(): +def test_create_aws_node_pool_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_aws_clusters._get_unset_required_fields({}) + unset_fields = transport.create_aws_node_pool._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", + "awsNodePoolId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "awsNodePool", + "awsNodePoolId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_aws_clusters_rest_interceptors(null_interceptor): +def test_create_aws_node_pool_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5332,14 +7327,16 @@ def test_list_aws_clusters_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AwsClustersRestInterceptor, "post_list_aws_clusters" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AwsClustersRestInterceptor, "post_create_aws_node_pool" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_list_aws_clusters" + transports.AwsClustersRestInterceptor, "pre_create_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.ListAwsClustersRequest.pb( - aws_service.ListAwsClustersRequest() + pb_message = aws_service.CreateAwsNodePoolRequest.pb( + aws_service.CreateAwsNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -5351,19 +7348,19 @@ def test_list_aws_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = aws_service.ListAwsClustersResponse.to_json( - aws_service.ListAwsClustersResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = aws_service.ListAwsClustersRequest() + request = aws_service.CreateAwsNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = aws_service.ListAwsClustersResponse() + post.return_value = operations_pb2.Operation() - client.list_aws_clusters( + client.create_aws_node_pool( request, metadata=[ ("key", "val"), @@ -5375,8 +7372,8 @@ def test_list_aws_clusters_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_aws_clusters_rest_bad_request( - transport: str = "rest", request_type=aws_service.ListAwsClustersRequest +def test_create_aws_node_pool_rest_bad_request( + transport: str = "rest", request_type=aws_service.CreateAwsNodePoolRequest ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-5384,7 +7381,7 @@ def test_list_aws_clusters_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5396,10 +7393,10 @@ def test_list_aws_clusters_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_aws_clusters(request) + client.create_aws_node_pool(request) -def test_list_aws_clusters_rest_flattened(): +def test_create_aws_node_pool_rest_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,40 +7405,42 @@ def test_list_aws_clusters_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = aws_service.ListAwsClustersResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/awsClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", + aws_node_pool=aws_resources.AwsNodePool(name="name_value"), + aws_node_pool_id="aws_node_pool_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_service.ListAwsClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_aws_clusters(**mock_args) + client.create_aws_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/awsClusters" + "%s/v1/{parent=projects/*/locations/*/awsClusters/*}/awsNodePools" % client.transport._host, args[1], ) -def test_list_aws_clusters_rest_flattened_error(transport: str = "rest"): +def test_create_aws_node_pool_rest_flattened_error(transport: str = "rest"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5450,90 +7449,158 @@ def test_list_aws_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_aws_clusters( - aws_service.ListAwsClustersRequest(), + client.create_aws_node_pool( + aws_service.CreateAwsNodePoolRequest(), parent="parent_value", + aws_node_pool=aws_resources.AwsNodePool(name="name_value"), + aws_node_pool_id="aws_node_pool_id_value", ) -def test_list_aws_clusters_rest_pager(transport: str = "rest"): +def test_create_aws_node_pool_rest_error(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + aws_service.UpdateAwsNodePoolRequest, + dict, + ], +) +def test_update_aws_node_pool_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - aws_service.ListAwsClustersResponse( - aws_clusters=[ - aws_resources.AwsCluster(), - aws_resources.AwsCluster(), - aws_resources.AwsCluster(), - ], - next_page_token="abc", - ), - aws_service.ListAwsClustersResponse( - aws_clusters=[], - next_page_token="def", - ), - aws_service.ListAwsClustersResponse( - aws_clusters=[ - aws_resources.AwsCluster(), - ], - next_page_token="ghi", - ), - aws_service.ListAwsClustersResponse( - aws_clusters=[ - aws_resources.AwsCluster(), - aws_resources.AwsCluster(), - ], - ), - ) - # Two responses for two calls - response = response + response + # send a request that will satisfy transcoding + request_init = { + "aws_node_pool": { + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + } + } + request_init["aws_node_pool"] = { + "name": 
"projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4", + "version": "version_value", + "config": { + "instance_type": "instance_type_value", + "root_volume": { + "size_gib": 844, + "volume_type": 1, + "iops": 443, + "throughput": 1114, + "kms_key_arn": "kms_key_arn_value", + }, + "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], + "labels": {}, + "tags": {}, + "iam_instance_profile": "iam_instance_profile_value", + "image_type": "image_type_value", + "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, + "security_group_ids": [ + "security_group_ids_value1", + "security_group_ids_value2", + ], + "proxy_config": { + "secret_arn": "secret_arn_value", + "secret_version": "secret_version_value", + }, + "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, + "instance_placement": {"tenancy": 1}, + "autoscaling_metrics_collection": { + "granularity": "granularity_value", + "metrics": ["metrics_value1", "metrics_value2"], + }, + "spot_config": { + "instance_types": ["instance_types_value1", "instance_types_value2"] + }, + }, + "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, + "subnet_id": "subnet_id_value", + "state": 1, + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "max_pods_constraint": {"max_pods_per_node": 1798}, + "errors": [{"message": "message_value"}], + "management": {"auto_repair": True}, + "update_settings": { + "surge_settings": {"max_surge": 971, "max_unavailable": 1577} + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the values into proper Response objs - response = tuple( - aws_service.ListAwsClustersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Determine if the message type is proto-plus or protobuf + test_field = aws_service.UpdateAwsNodePoolRequest.meta.fields["aws_node_pool"] - sample_request = {"parent": "projects/sample1/locations/sample2"} + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - pager = client.list_aws_clusters(request=sample_request) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, aws_resources.AwsCluster) for i in results) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - pages = list(client.list_aws_clusters(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not 
present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["aws_node_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - aws_service.DeleteAwsClusterRequest, - dict, - ], -) -def test_delete_aws_cluster_rest(request_type): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["aws_node_pool"][field])): + del request_init["aws_node_pool"][field][i][subfield] + else: + del request_init["aws_node_pool"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -5548,19 +7615,18 @@ def test_delete_aws_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_aws_cluster(request) + response = client.update_aws_node_pool(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_aws_cluster_rest_required_fields( - request_type=aws_service.DeleteAwsClusterRequest, +def test_update_aws_node_pool_rest_required_fields( + request_type=aws_service.UpdateAwsNodePoolRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5575,29 +7641,24 @@ def test_delete_aws_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aws_cluster._get_unset_required_fields(jsonified_request) + ).update_aws_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aws_cluster._get_unset_required_fields(jsonified_request) + ).update_aws_node_pool._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "etag", + "update_mask", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5618,9 +7679,10 @@ def test_delete_aws_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -5630,33 +7692,37 @@ def test_delete_aws_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_aws_cluster(request) + response = client.update_aws_node_pool(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_aws_cluster_rest_unset_required_fields(): +def test_update_aws_node_pool_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_aws_cluster._get_unset_required_fields({}) + unset_fields = transport.update_aws_node_pool._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "etag", + "updateMask", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "awsNodePool", + "updateMask", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_aws_cluster_rest_interceptors(null_interceptor): +def test_update_aws_node_pool_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5671,14 +7737,14 @@ def 
test_delete_aws_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AwsClustersRestInterceptor, "post_delete_aws_cluster" + transports.AwsClustersRestInterceptor, "post_update_aws_node_pool" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_delete_aws_cluster" + transports.AwsClustersRestInterceptor, "pre_update_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.DeleteAwsClusterRequest.pb( - aws_service.DeleteAwsClusterRequest() + pb_message = aws_service.UpdateAwsNodePoolRequest.pb( + aws_service.UpdateAwsNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -5694,7 +7760,7 @@ def test_delete_aws_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = aws_service.DeleteAwsClusterRequest() + request = aws_service.UpdateAwsNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5702,7 +7768,7 @@ def test_delete_aws_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_aws_cluster( + client.update_aws_node_pool( request, metadata=[ ("key", "val"), @@ -5714,8 +7780,8 @@ def test_delete_aws_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_aws_cluster_rest_bad_request( - transport: str = "rest", request_type=aws_service.DeleteAwsClusterRequest +def test_update_aws_node_pool_rest_bad_request( + transport: str = "rest", request_type=aws_service.UpdateAwsNodePoolRequest ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5723,7 +7789,11 @@ def test_delete_aws_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/awsClusters/sample3"} + request_init = { + "aws_node_pool": { + "name": 
"projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5735,10 +7805,10 @@ def test_delete_aws_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_aws_cluster(request) + client.update_aws_node_pool(request) -def test_delete_aws_cluster_rest_flattened(): +def test_update_aws_node_pool_rest_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5751,12 +7821,15 @@ def test_delete_aws_cluster_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3" + "aws_node_pool": { + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + aws_node_pool=aws_resources.AwsNodePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5767,20 +7840,20 @@ def test_delete_aws_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_aws_cluster(**mock_args) + client.update_aws_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/awsClusters/*}" + "%s/v1/{aws_node_pool.name=projects/*/locations/*/awsClusters/*/awsNodePools/*}" % client.transport._host, args[1], ) -def test_delete_aws_cluster_rest_flattened_error(transport: str = "rest"): +def test_update_aws_node_pool_rest_flattened_error(transport: str = "rest"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5789,13 +7862,14 @@ def test_delete_aws_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_aws_cluster( - aws_service.DeleteAwsClusterRequest(), - name="name_value", + client.update_aws_node_pool( + aws_service.UpdateAwsNodePoolRequest(), + aws_node_pool=aws_resources.AwsNodePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_aws_cluster_rest_error(): +def test_update_aws_node_pool_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5804,11 +7878,11 @@ def test_delete_aws_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - aws_service.GenerateAwsAccessTokenRequest, + aws_service.RollbackAwsNodePoolUpdateRequest, dict, ], ) -def test_generate_aws_access_token_rest(request_type): +def test_rollback_aws_node_pool_update_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5816,40 +7890,35 @@ def test_generate_aws_access_token_rest(request_type): # send a request that will satisfy transcoding request_init = { - "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" } request = request_type(**request_init) # 
Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = aws_service.GenerateAwsAccessTokenResponse( - access_token="access_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_service.GenerateAwsAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.generate_aws_access_token(request) + response = client.rollback_aws_node_pool_update(request) # Establish that the response is the type that we expect. - assert isinstance(response, aws_service.GenerateAwsAccessTokenResponse) - assert response.access_token == "access_token_value" + assert response.operation.name == "operations/spam" -def test_generate_aws_access_token_rest_required_fields( - request_type=aws_service.GenerateAwsAccessTokenRequest, +def test_rollback_aws_node_pool_update_rest_required_fields( + request_type=aws_service.RollbackAwsNodePoolUpdateRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["aws_cluster"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5864,21 +7933,21 @@ def test_generate_aws_access_token_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_aws_access_token._get_unset_required_fields(jsonified_request) + ).rollback_aws_node_pool_update._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now 
present - jsonified_request["awsCluster"] = "aws_cluster_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_aws_access_token._get_unset_required_fields(jsonified_request) + ).rollback_aws_node_pool_update._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "awsCluster" in jsonified_request - assert jsonified_request["awsCluster"] == "aws_cluster_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5887,7 +7956,7 @@ def test_generate_aws_access_token_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = aws_service.GenerateAwsAccessTokenResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5899,39 +7968,39 @@ def test_generate_aws_access_token_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = aws_service.GenerateAwsAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.generate_aws_access_token(request) + response = client.rollback_aws_node_pool_update(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_generate_aws_access_token_rest_unset_required_fields(): +def test_rollback_aws_node_pool_update_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.generate_aws_access_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("awsCluster",))) + unset_fields = transport.rollback_aws_node_pool_update._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_aws_access_token_rest_interceptors(null_interceptor): +def test_rollback_aws_node_pool_update_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5944,14 +8013,16 @@ def test_generate_aws_access_token_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - transports.AwsClustersRestInterceptor, "post_generate_aws_access_token" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AwsClustersRestInterceptor, "post_rollback_aws_node_pool_update" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_generate_aws_access_token" + transports.AwsClustersRestInterceptor, "pre_rollback_aws_node_pool_update" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.GenerateAwsAccessTokenRequest.pb( - aws_service.GenerateAwsAccessTokenRequest() + pb_message = aws_service.RollbackAwsNodePoolUpdateRequest.pb( + aws_service.RollbackAwsNodePoolUpdateRequest() ) transcode.return_value = { "method": "post", @@ -5963,19 +8034,19 @@ def test_generate_aws_access_token_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = aws_service.GenerateAwsAccessTokenResponse.to_json( - aws_service.GenerateAwsAccessTokenResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = aws_service.GenerateAwsAccessTokenRequest() + request = aws_service.RollbackAwsNodePoolUpdateRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = aws_service.GenerateAwsAccessTokenResponse() + post.return_value = operations_pb2.Operation() - client.generate_aws_access_token( + client.rollback_aws_node_pool_update( request, metadata=[ ("key", "val"), @@ -5983,37 +8054,94 @@ def test_generate_aws_access_token_rest_interceptors(null_interceptor): ], ) - pre.assert_called_once() - post.assert_called_once() + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_aws_node_pool_update_rest_bad_request( + transport: str = "rest", request_type=aws_service.RollbackAwsNodePoolUpdateRequest 
+): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rollback_aws_node_pool_update(request) + + +def test_rollback_aws_node_pool_update_rest_flattened(): + client = AwsClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rollback_aws_node_pool_update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/awsClusters/*/awsNodePools/*}:rollback" + % client.transport._host, + args[1], + ) -def test_generate_aws_access_token_rest_bad_request( - transport: str = "rest", request_type=aws_service.GenerateAwsAccessTokenRequest -): +def test_rollback_aws_node_pool_update_rest_flattened_error(transport: str = "rest"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.generate_aws_access_token(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rollback_aws_node_pool_update( + aws_service.RollbackAwsNodePoolUpdateRequest(), + name="name_value", + ) -def test_generate_aws_access_token_rest_error(): +def test_rollback_aws_node_pool_update_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6022,157 +8150,64 @@ def test_generate_aws_access_token_rest_error(): @pytest.mark.parametrize( "request_type", [ - aws_service.CreateAwsNodePoolRequest, + aws_service.GetAwsNodePoolRequest, dict, ], ) -def test_create_aws_node_pool_rest(request_type): +def test_get_aws_node_pool_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} - request_init["aws_node_pool"] = { - "name": "name_value", - "version": "version_value", - "config": { - "instance_type": "instance_type_value", - "root_volume": { - "size_gib": 844, - "volume_type": 1, - "iops": 443, - "kms_key_arn": "kms_key_arn_value", - }, - "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], - "labels": {}, - "tags": {}, - "iam_instance_profile": "iam_instance_profile_value", - "image_type": "image_type_value", - "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, - "security_group_ids": [ - "security_group_ids_value1", - "security_group_ids_value2", - ], - "proxy_config": { - "secret_arn": "secret_arn_value", - "secret_version": "secret_version_value", - }, - "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, - "instance_placement": {"tenancy": 1}, - "autoscaling_metrics_collection": { - "granularity": "granularity_value", - "metrics": ["metrics_value1", "metrics_value2"], - }, - }, - "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, - "subnet_id": "subnet_id_value", - "state": 1, - "uid": "uid_value", - "reconciling": True, - 
"create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "max_pods_constraint": {"max_pods_per_node": 1798}, - "errors": [{"message": "message_value"}], + request_init = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = aws_service.CreateAwsNodePoolRequest.meta.fields["aws_node_pool"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aws_node_pool"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For 
fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aws_node_pool"][field])): - del request_init["aws_node_pool"][field][i][subfield] - else: - del request_init["aws_node_pool"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_resources.AwsNodePool( + name="name_value", + version="version_value", + subnet_id="subnet_id_value", + state=aws_resources.AwsNodePool.State.PROVISIONING, + uid="uid_value", + reconciling=True, + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_resources.AwsNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_aws_node_pool(request) + response = client.get_aws_node_pool(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, aws_resources.AwsNodePool) + assert response.name == "name_value" + assert response.version == "version_value" + assert response.subnet_id == "subnet_id_value" + assert response.state == aws_resources.AwsNodePool.State.PROVISIONING + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" -def test_create_aws_node_pool_rest_required_fields( - request_type=aws_service.CreateAwsNodePoolRequest, +def test_get_aws_node_pool_rest_required_fields( + request_type=aws_service.GetAwsNodePoolRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["parent"] = "" - request_init["aws_node_pool_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6184,37 +8219,24 @@ def test_create_aws_node_pool_rest_required_fields( ) # verify fields with default values are dropped - assert "awsNodePoolId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() 
- ).create_aws_node_pool._get_unset_required_fields(jsonified_request) + ).get_aws_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "awsNodePoolId" in jsonified_request - assert jsonified_request["awsNodePoolId"] == request_init["aws_node_pool_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["awsNodePoolId"] = "aws_node_pool_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_aws_node_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "aws_node_pool_id", - "validate_only", - ) - ) + ).get_aws_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "awsNodePoolId" in jsonified_request - assert jsonified_request["awsNodePoolId"] == "aws_node_pool_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6223,7 +8245,7 @@ def test_create_aws_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_resources.AwsNodePool() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6235,56 +8257,39 @@ def test_create_aws_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = aws_resources.AwsNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_aws_node_pool(request) + response = client.get_aws_node_pool(request) - expected_params = [ - ( - "awsNodePoolId", - "", - ), - ] + expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_aws_node_pool_rest_unset_required_fields(): +def test_get_aws_node_pool_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_aws_node_pool._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "awsNodePoolId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "awsNodePool", - "awsNodePoolId", - ) - ) - ) + unset_fields = transport.get_aws_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_aws_node_pool_rest_interceptors(null_interceptor): +def test_get_aws_node_pool_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6297,16 +8302,14 @@ def 
test_create_aws_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AwsClustersRestInterceptor, "post_create_aws_node_pool" + transports.AwsClustersRestInterceptor, "post_get_aws_node_pool" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_create_aws_node_pool" + transports.AwsClustersRestInterceptor, "pre_get_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.CreateAwsNodePoolRequest.pb( - aws_service.CreateAwsNodePoolRequest() + pb_message = aws_service.GetAwsNodePoolRequest.pb( + aws_service.GetAwsNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -6318,19 +8321,19 @@ def test_create_aws_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = aws_resources.AwsNodePool.to_json( + aws_resources.AwsNodePool() ) - request = aws_service.CreateAwsNodePoolRequest() + request = aws_service.GetAwsNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = aws_resources.AwsNodePool() - client.create_aws_node_pool( + client.get_aws_node_pool( request, metadata=[ ("key", "val"), @@ -6342,8 +8345,8 @@ def test_create_aws_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_aws_node_pool_rest_bad_request( - transport: str = "rest", request_type=aws_service.CreateAwsNodePoolRequest +def test_get_aws_node_pool_rest_bad_request( + transport: str = "rest", request_type=aws_service.GetAwsNodePoolRequest ): client = AwsClustersClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -6351,7 +8354,9 @@ def test_create_aws_node_pool_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6363,10 +8368,10 @@ def test_create_aws_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_aws_node_pool(request) + client.get_aws_node_pool(request) -def test_create_aws_node_pool_rest_flattened(): +def test_get_aws_node_pool_rest_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6375,42 +8380,42 @@ def test_create_aws_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_resources.AwsNodePool() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/awsClusters/sample3" + "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - aws_node_pool=aws_resources.AwsNodePool(name="name_value"), - aws_node_pool_id="aws_node_pool_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_resources.AwsNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_aws_node_pool(**mock_args) + client.get_aws_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/awsClusters/*}/awsNodePools" + "%s/v1/{name=projects/*/locations/*/awsClusters/*/awsNodePools/*}" % client.transport._host, args[1], ) -def test_create_aws_node_pool_rest_flattened_error(transport: str = "rest"): +def test_get_aws_node_pool_rest_flattened_error(transport: str = "rest"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6419,15 +8424,13 @@ def test_create_aws_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_aws_node_pool( - aws_service.CreateAwsNodePoolRequest(), - parent="parent_value", - aws_node_pool=aws_resources.AwsNodePool(name="name_value"), - aws_node_pool_id="aws_node_pool_id_value", + client.get_aws_node_pool( + aws_service.GetAwsNodePoolRequest(), + name="name_value", ) -def test_create_aws_node_pool_rest_error(): +def test_get_aws_node_pool_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6436,159 +8439,50 @@ def test_create_aws_node_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - aws_service.UpdateAwsNodePoolRequest, + aws_service.ListAwsNodePoolsRequest, dict, ], ) -def test_update_aws_node_pool_rest(request_type): +def test_list_aws_node_pools_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "aws_node_pool": { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" - } - } - request_init["aws_node_pool"] = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4", - "version": "version_value", - "config": { - "instance_type": "instance_type_value", - "root_volume": { - "size_gib": 844, - "volume_type": 1, - "iops": 443, - "kms_key_arn": "kms_key_arn_value", - }, - "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], - "labels": {}, - "tags": {}, - "iam_instance_profile": "iam_instance_profile_value", - "image_type": "image_type_value", - "ssh_config": {"ec2_key_pair": "ec2_key_pair_value"}, - "security_group_ids": [ - "security_group_ids_value1", - "security_group_ids_value2", - ], - "proxy_config": { - "secret_arn": "secret_arn_value", - "secret_version": "secret_version_value", - }, - "config_encryption": {"kms_key_arn": "kms_key_arn_value"}, - "instance_placement": {"tenancy": 1}, - 
"autoscaling_metrics_collection": { - "granularity": "granularity_value", - "metrics": ["metrics_value1", "metrics_value2"], - }, - }, - "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, - "subnet_id": "subnet_id_value", - "state": 1, - "uid": "uid_value", - "reconciling": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "max_pods_constraint": {"max_pods_per_node": 1798}, - "errors": [{"message": "message_value"}], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = aws_service.UpdateAwsNodePoolRequest.meta.fields["aws_node_pool"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aws_node_pool"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aws_node_pool"][field])): - del request_init["aws_node_pool"][field][i][subfield] - else: - del 
request_init["aws_node_pool"][field][subfield] + request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_service.ListAwsNodePoolsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_service.ListAwsNodePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_aws_node_pool(request) + response = client.list_aws_node_pools(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListAwsNodePoolsPager) + assert response.next_page_token == "next_page_token_value" -def test_update_aws_node_pool_rest_required_fields( - request_type=aws_service.UpdateAwsNodePoolRequest, +def test_list_aws_node_pools_rest_required_fields( + request_type=aws_service.ListAwsNodePoolsRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6603,24 +8497,28 @@ def test_update_aws_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aws_node_pool._get_unset_required_fields(jsonified_request) + ).list_aws_node_pools._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aws_node_pool._get_unset_required_fields(jsonified_request) + ).list_aws_node_pools._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6629,7 +8527,7 @@ def test_update_aws_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_service.ListAwsNodePoolsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6641,50 +8539,47 @@ def test_update_aws_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = aws_service.ListAwsNodePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_aws_node_pool(request) + response = client.list_aws_node_pools(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_aws_node_pool_rest_unset_required_fields(): +def test_list_aws_node_pools_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_aws_node_pool._get_unset_required_fields({}) + unset_fields = transport.list_aws_node_pools._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "awsNodePool", - "updateMask", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_aws_node_pool_rest_interceptors(null_interceptor): +def test_list_aws_node_pools_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6697,16 +8592,14 @@ def test_update_aws_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AwsClustersRestInterceptor, "post_update_aws_node_pool" + transports.AwsClustersRestInterceptor, "post_list_aws_node_pools" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_update_aws_node_pool" + transports.AwsClustersRestInterceptor, "pre_list_aws_node_pools" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.UpdateAwsNodePoolRequest.pb( - aws_service.UpdateAwsNodePoolRequest() + pb_message = aws_service.ListAwsNodePoolsRequest.pb( + aws_service.ListAwsNodePoolsRequest() ) transcode.return_value = { "method": "post", @@ -6718,19 +8611,19 @@ def test_update_aws_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = aws_service.ListAwsNodePoolsResponse.to_json( + aws_service.ListAwsNodePoolsResponse() ) - request = aws_service.UpdateAwsNodePoolRequest() + request = aws_service.ListAwsNodePoolsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = aws_service.ListAwsNodePoolsResponse() - client.update_aws_node_pool( + client.list_aws_node_pools( request, metadata=[ ("key", "val"), @@ -6742,8 +8635,8 @@ def test_update_aws_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_aws_node_pool_rest_bad_request( - transport: str = "rest", request_type=aws_service.UpdateAwsNodePoolRequest +def 
test_list_aws_node_pools_rest_bad_request( + transport: str = "rest", request_type=aws_service.ListAwsNodePoolsRequest ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6751,11 +8644,7 @@ def test_update_aws_node_pool_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "aws_node_pool": { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" - } - } + request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6767,10 +8656,10 @@ def test_update_aws_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_aws_node_pool(request) + client.list_aws_node_pools(request) -def test_update_aws_node_pool_rest_flattened(): +def test_list_aws_node_pools_rest_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6779,43 +8668,42 @@ def test_update_aws_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_service.ListAwsNodePoolsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "aws_node_pool": { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" - } + "parent": "projects/sample1/locations/sample2/awsClusters/sample3" } # get truthy value for each flattened field mock_args = dict( - aws_node_pool=aws_resources.AwsNodePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_service.ListAwsNodePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_aws_node_pool(**mock_args) + client.list_aws_node_pools(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{aws_node_pool.name=projects/*/locations/*/awsClusters/*/awsNodePools/*}" + "%s/v1/{parent=projects/*/locations/*/awsClusters/*}/awsNodePools" % client.transport._host, args[1], ) -def test_update_aws_node_pool_rest_flattened_error(transport: str = "rest"): +def test_list_aws_node_pools_rest_flattened_error(transport: str = "rest"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6824,27 +8712,85 @@ def test_update_aws_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_aws_node_pool( - aws_service.UpdateAwsNodePoolRequest(), - aws_node_pool=aws_resources.AwsNodePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_aws_node_pools( + aws_service.ListAwsNodePoolsRequest(), + parent="parent_value", ) -def test_update_aws_node_pool_rest_error(): +def test_list_aws_node_pools_rest_pager(transport: str = "rest"): client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + next_page_token="abc", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[], + next_page_token="def", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + ], + next_page_token="ghi", + ), + aws_service.ListAwsNodePoolsResponse( + aws_node_pools=[ + aws_resources.AwsNodePool(), + aws_resources.AwsNodePool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + aws_service.ListAwsNodePoolsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": 
"projects/sample1/locations/sample2/awsClusters/sample3" + } + + pager = client.list_aws_node_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, aws_resources.AwsNodePool) for i in results) + + pages = list(client.list_aws_node_pools(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - aws_service.GetAwsNodePoolRequest, + aws_service.DeleteAwsNodePoolRequest, dict, ], ) -def test_get_aws_node_pool_rest(request_type): +def test_delete_aws_node_pool_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6859,40 +8805,23 @@ def test_get_aws_node_pool_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = aws_resources.AwsNodePool( - name="name_value", - version="version_value", - subnet_id="subnet_id_value", - state=aws_resources.AwsNodePool.State.PROVISIONING, - uid="uid_value", - reconciling=True, - etag="etag_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_resources.AwsNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_aws_node_pool(request) + response = client.delete_aws_node_pool(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, aws_resources.AwsNodePool) - assert response.name == "name_value" - assert response.version == "version_value" - assert response.subnet_id == "subnet_id_value" - assert response.state == aws_resources.AwsNodePool.State.PROVISIONING - assert response.uid == "uid_value" - assert response.reconciling is True - assert response.etag == "etag_value" + assert response.operation.name == "operations/spam" -def test_get_aws_node_pool_rest_required_fields( - request_type=aws_service.GetAwsNodePoolRequest, +def test_delete_aws_node_pool_rest_required_fields( + request_type=aws_service.DeleteAwsNodePoolRequest, ): transport_class = transports.AwsClustersRestTransport @@ -6912,7 +8841,7 @@ def test_get_aws_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aws_node_pool._get_unset_required_fields(jsonified_request) + ).delete_aws_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6921,7 +8850,16 @@ def test_get_aws_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aws_node_pool._get_unset_required_fields(jsonified_request) + ).delete_aws_node_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "ignore_errors", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6935,7 +8873,7 @@ def test_get_aws_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = aws_resources.AwsNodePool() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6947,39 +8885,46 @@ def test_get_aws_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = aws_resources.AwsNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_aws_node_pool(request) + response = client.delete_aws_node_pool(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_aws_node_pool_rest_unset_required_fields(): +def test_delete_aws_node_pool_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_aws_node_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_aws_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "ignoreErrors", + "validateOnly", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_aws_node_pool_rest_interceptors(null_interceptor): +def test_delete_aws_node_pool_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6992,14 +8937,16 @@ def 
test_get_aws_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AwsClustersRestInterceptor, "post_get_aws_node_pool" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AwsClustersRestInterceptor, "post_delete_aws_node_pool" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_get_aws_node_pool" + transports.AwsClustersRestInterceptor, "pre_delete_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.GetAwsNodePoolRequest.pb( - aws_service.GetAwsNodePoolRequest() + pb_message = aws_service.DeleteAwsNodePoolRequest.pb( + aws_service.DeleteAwsNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -7011,19 +8958,19 @@ def test_get_aws_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = aws_resources.AwsNodePool.to_json( - aws_resources.AwsNodePool() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = aws_service.GetAwsNodePoolRequest() + request = aws_service.DeleteAwsNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = aws_resources.AwsNodePool() + post.return_value = operations_pb2.Operation() - client.get_aws_node_pool( + client.delete_aws_node_pool( request, metadata=[ ("key", "val"), @@ -7035,8 +8982,8 @@ def test_get_aws_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_aws_node_pool_rest_bad_request( - transport: str = "rest", request_type=aws_service.GetAwsNodePoolRequest +def test_delete_aws_node_pool_rest_bad_request( + transport: str = "rest", request_type=aws_service.DeleteAwsNodePoolRequest ): client = AwsClustersClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7058,10 +9005,10 @@ def test_get_aws_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_aws_node_pool(request) + client.delete_aws_node_pool(request) -def test_get_aws_node_pool_rest_flattened(): +def test_delete_aws_node_pool_rest_flattened(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7070,7 +9017,7 @@ def test_get_aws_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = aws_resources.AwsNodePool() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -7086,13 +9033,11 @@ def test_get_aws_node_pool_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_resources.AwsNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_aws_node_pool(**mock_args) + client.delete_aws_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -7105,7 +9050,7 @@ def test_get_aws_node_pool_rest_flattened(): ) -def test_get_aws_node_pool_rest_flattened_error(transport: str = "rest"): +def test_delete_aws_node_pool_rest_flattened_error(transport: str = "rest"): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7114,13 +9059,13 @@ def test_get_aws_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_aws_node_pool( - aws_service.GetAwsNodePoolRequest(), + client.delete_aws_node_pool( + aws_service.DeleteAwsNodePoolRequest(), name="name_value", ) -def test_get_aws_node_pool_rest_error(): +def test_delete_aws_node_pool_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7129,50 +9074,68 @@ def test_get_aws_node_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - aws_service.ListAwsNodePoolsRequest, + aws_service.GetAwsOpenIdConfigRequest, dict, ], ) -def test_list_aws_node_pools_rest(request_type): +def test_get_aws_open_id_config_rest(request_type): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} + request_init = { + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = aws_service.ListAwsNodePoolsResponse( - next_page_token="next_page_token_value", + return_value = aws_resources.AwsOpenIdConfig( + issuer="issuer_value", + jwks_uri="jwks_uri_value", + response_types_supported=["response_types_supported_value"], + subject_types_supported=["subject_types_supported_value"], + id_token_signing_alg_values_supported=[ + "id_token_signing_alg_values_supported_value" + ], + claims_supported=["claims_supported_value"], + grant_types=["grant_types_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = aws_service.ListAwsNodePoolsResponse.pb(return_value) + return_value = aws_resources.AwsOpenIdConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_aws_node_pools(request) + response = client.get_aws_open_id_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAwsNodePoolsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, aws_resources.AwsOpenIdConfig) + assert response.issuer == "issuer_value" + assert response.jwks_uri == "jwks_uri_value" + assert response.response_types_supported == ["response_types_supported_value"] + assert response.subject_types_supported == ["subject_types_supported_value"] + assert response.id_token_signing_alg_values_supported == [ + "id_token_signing_alg_values_supported_value" + ] + assert response.claims_supported == ["claims_supported_value"] + assert response.grant_types == ["grant_types_value"] -def test_list_aws_node_pools_rest_required_fields( - request_type=aws_service.ListAwsNodePoolsRequest, +def test_get_aws_open_id_config_rest_required_fields( + request_type=aws_service.GetAwsOpenIdConfigRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["parent"] = "" + request_init["aws_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7187,28 +9150,21 @@ def test_list_aws_node_pools_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aws_node_pools._get_unset_required_fields(jsonified_request) + ).get_aws_open_id_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["awsCluster"] = "aws_cluster_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aws_node_pools._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_aws_open_id_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "awsCluster" in jsonified_request + assert jsonified_request["awsCluster"] == "aws_cluster_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7217,7 +9173,7 @@ def test_list_aws_node_pools_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = aws_service.ListAwsNodePoolsResponse() + return_value = aws_resources.AwsOpenIdConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7238,38 +9194,30 @@ def test_list_aws_node_pools_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = aws_service.ListAwsNodePoolsResponse.pb(return_value) + return_value = aws_resources.AwsOpenIdConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_aws_node_pools(request) + response = client.get_aws_open_id_config(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_aws_node_pools_rest_unset_required_fields(): +def test_get_aws_open_id_config_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_aws_node_pools._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", 
- "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_aws_open_id_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("awsCluster",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_aws_node_pools_rest_interceptors(null_interceptor): +def test_get_aws_open_id_config_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7282,14 +9230,14 @@ def test_list_aws_node_pools_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AwsClustersRestInterceptor, "post_list_aws_node_pools" + transports.AwsClustersRestInterceptor, "post_get_aws_open_id_config" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_list_aws_node_pools" + transports.AwsClustersRestInterceptor, "pre_get_aws_open_id_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.ListAwsNodePoolsRequest.pb( - aws_service.ListAwsNodePoolsRequest() + pb_message = aws_service.GetAwsOpenIdConfigRequest.pb( + aws_service.GetAwsOpenIdConfigRequest() ) transcode.return_value = { "method": "post", @@ -7301,19 +9249,19 @@ def test_list_aws_node_pools_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = aws_service.ListAwsNodePoolsResponse.to_json( - aws_service.ListAwsNodePoolsResponse() + req.return_value._content = aws_resources.AwsOpenIdConfig.to_json( + aws_resources.AwsOpenIdConfig() ) - request = aws_service.ListAwsNodePoolsRequest() + request = aws_service.GetAwsOpenIdConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = aws_service.ListAwsNodePoolsResponse() + post.return_value = 
aws_resources.AwsOpenIdConfig() - client.list_aws_node_pools( + client.get_aws_open_id_config( request, metadata=[ ("key", "val"), @@ -7325,8 +9273,8 @@ def test_list_aws_node_pools_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_aws_node_pools_rest_bad_request( - transport: str = "rest", request_type=aws_service.ListAwsNodePoolsRequest +def test_get_aws_open_id_config_rest_bad_request( + transport: str = "rest", request_type=aws_service.GetAwsOpenIdConfigRequest ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7334,7 +9282,9 @@ def test_list_aws_node_pools_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/awsClusters/sample3"} + request_init = { + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7346,141 +9296,23 @@ def test_list_aws_node_pools_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_aws_node_pools(request) - - -def test_list_aws_node_pools_rest_flattened(): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = aws_service.ListAwsNodePoolsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/awsClusters/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = aws_service.ListAwsNodePoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_aws_node_pools(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/awsClusters/*}/awsNodePools" - % client.transport._host, - args[1], - ) - - -def test_list_aws_node_pools_rest_flattened_error(transport: str = "rest"): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_aws_node_pools( - aws_service.ListAwsNodePoolsRequest(), - parent="parent_value", - ) + client.get_aws_open_id_config(request) -def test_list_aws_node_pools_rest_pager(transport: str = "rest"): +def test_get_aws_open_id_config_rest_error(): client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - next_page_token="abc", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[], - next_page_token="def", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - ], - next_page_token="ghi", - ), - aws_service.ListAwsNodePoolsResponse( - aws_node_pools=[ - aws_resources.AwsNodePool(), - aws_resources.AwsNodePool(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - aws_service.ListAwsNodePoolsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/awsClusters/sample3" - } - - pager = client.list_aws_node_pools(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, aws_resources.AwsNodePool) for i in results) - - pages = list(client.list_aws_node_pools(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - aws_service.DeleteAwsNodePoolRequest, + aws_service.GetAwsJsonWebKeysRequest, dict, ], ) -def test_delete_aws_node_pool_rest(request_type): +def test_get_aws_json_web_keys_rest(request_type): client = AwsClustersClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7488,35 +9320,37 @@ def test_delete_aws_node_pool_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_resources.AwsJsonWebKeys() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = aws_resources.AwsJsonWebKeys.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_aws_node_pool(request) + response = client.get_aws_json_web_keys(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, aws_resources.AwsJsonWebKeys) -def test_delete_aws_node_pool_rest_required_fields( - request_type=aws_service.DeleteAwsNodePoolRequest, +def test_get_aws_json_web_keys_rest_required_fields( + request_type=aws_service.GetAwsJsonWebKeysRequest, ): transport_class = transports.AwsClustersRestTransport request_init = {} - request_init["name"] = "" + request_init["aws_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7531,29 +9365,21 @@ def test_delete_aws_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aws_node_pool._get_unset_required_fields(jsonified_request) + ).get_aws_json_web_keys._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["awsCluster"] = "aws_cluster_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aws_node_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "validate_only", - ) - ) + ).get_aws_json_web_keys._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "awsCluster" in jsonified_request + assert jsonified_request["awsCluster"] == "aws_cluster_value" client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7562,7 +9388,7 @@ def test_delete_aws_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = aws_resources.AwsJsonWebKeys() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7574,45 +9400,39 @@ def test_delete_aws_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = aws_resources.AwsJsonWebKeys.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_aws_node_pool(request) + response = client.get_aws_json_web_keys(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_aws_node_pool_rest_unset_required_fields(): +def test_get_aws_json_web_keys_rest_unset_required_fields(): transport = transports.AwsClustersRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_aws_node_pool._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "etag", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.get_aws_json_web_keys._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("awsCluster",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_aws_node_pool_rest_interceptors(null_interceptor): +def test_get_aws_json_web_keys_rest_interceptors(null_interceptor): transport = transports.AwsClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7625,16 +9445,14 @@ def test_delete_aws_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AwsClustersRestInterceptor, "post_delete_aws_node_pool" + transports.AwsClustersRestInterceptor, "post_get_aws_json_web_keys" ) as post, mock.patch.object( - transports.AwsClustersRestInterceptor, "pre_delete_aws_node_pool" + transports.AwsClustersRestInterceptor, "pre_get_aws_json_web_keys" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = aws_service.DeleteAwsNodePoolRequest.pb( - aws_service.DeleteAwsNodePoolRequest() + pb_message = aws_service.GetAwsJsonWebKeysRequest.pb( + aws_service.GetAwsJsonWebKeysRequest() ) transcode.return_value = { "method": "post", @@ -7646,19 +9464,19 @@ def test_delete_aws_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = aws_resources.AwsJsonWebKeys.to_json( + aws_resources.AwsJsonWebKeys() ) - request = 
aws_service.DeleteAwsNodePoolRequest() + request = aws_service.GetAwsJsonWebKeysRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = aws_resources.AwsJsonWebKeys() - client.delete_aws_node_pool( + client.get_aws_json_web_keys( request, metadata=[ ("key", "val"), @@ -7670,8 +9488,8 @@ def test_delete_aws_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_aws_node_pool_rest_bad_request( - transport: str = "rest", request_type=aws_service.DeleteAwsNodePoolRequest +def test_get_aws_json_web_keys_rest_bad_request( + transport: str = "rest", request_type=aws_service.GetAwsJsonWebKeysRequest ): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7680,7 +9498,7 @@ def test_delete_aws_node_pool_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" + "aws_cluster": "projects/sample1/locations/sample2/awsClusters/sample3" } request = request_type(**request_init) @@ -7693,67 +9511,10 @@ def test_delete_aws_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_aws_node_pool(request) - - -def test_delete_aws_node_pool_rest_flattened(): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/awsClusters/sample3/awsNodePools/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client.get_aws_json_web_keys(request) - client.delete_aws_node_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/awsClusters/*/awsNodePools/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_aws_node_pool_rest_flattened_error(transport: str = "rest"): - client = AwsClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_aws_node_pool( - aws_service.DeleteAwsNodePoolRequest(), - name="name_value", - ) - -def test_delete_aws_node_pool_rest_error(): +def test_get_aws_json_web_keys_rest_error(): client = AwsClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8176,12 +9937,16 @@ def test_aws_clusters_base_transport(): "get_aws_cluster", "list_aws_clusters", "delete_aws_cluster", + "generate_aws_cluster_agent_token", "generate_aws_access_token", "create_aws_node_pool", "update_aws_node_pool", + "rollback_aws_node_pool_update", "get_aws_node_pool", "list_aws_node_pools", "delete_aws_node_pool", + "get_aws_open_id_config", + "get_aws_json_web_keys", "get_aws_server_config", "get_operation", "cancel_operation", @@ -8479,6 +10244,9 @@ def test_aws_clusters_client_transport_session_collision(transport_name): session1 = client1.transport.delete_aws_cluster._session session2 = client2.transport.delete_aws_cluster._session assert session1 != session2 + session1 = client1.transport.generate_aws_cluster_agent_token._session + session2 = client2.transport.generate_aws_cluster_agent_token._session + assert session1 != session2 session1 = client1.transport.generate_aws_access_token._session session2 = client2.transport.generate_aws_access_token._session assert session1 != session2 @@ -8488,6 +10256,9 @@ def test_aws_clusters_client_transport_session_collision(transport_name): session1 = client1.transport.update_aws_node_pool._session session2 = client2.transport.update_aws_node_pool._session assert session1 != session2 + session1 = client1.transport.rollback_aws_node_pool_update._session + session2 = client2.transport.rollback_aws_node_pool_update._session + assert session1 != session2 session1 = client1.transport.get_aws_node_pool._session session2 = client2.transport.get_aws_node_pool._session assert session1 != session2 @@ -8497,6 +10268,12 @@ def 
test_aws_clusters_client_transport_session_collision(transport_name): session1 = client1.transport.delete_aws_node_pool._session session2 = client2.transport.delete_aws_node_pool._session assert session1 != session2 + session1 = client1.transport.get_aws_open_id_config._session + session2 = client2.transport.get_aws_open_id_config._session + assert session1 != session2 + session1 = client1.transport.get_aws_json_web_keys._session + session2 = client2.transport.get_aws_json_web_keys._session + assert session1 != session2 session1 = client1.transport.get_aws_server_config._session session2 = client2.transport.get_aws_server_config._session assert session1 != session2 diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py index 5824059c5c14..40b161bad45f 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py @@ -3398,6 +3398,175 @@ async def test_delete_azure_cluster_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + azure_service.GenerateAzureClusterAgentTokenRequest, + dict, + ], +) +def test_generate_azure_cluster_agent_token(request_type, transport: str = "grpc"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_azure_cluster_agent_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = azure_service.GenerateAzureClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + response = client.generate_azure_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GenerateAzureClusterAgentTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, azure_service.GenerateAzureClusterAgentTokenResponse) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +def test_generate_azure_cluster_agent_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_azure_cluster_agent_token), "__call__" + ) as call: + client.generate_azure_cluster_agent_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GenerateAzureClusterAgentTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_azure_cluster_agent_token_async( + transport: str = "grpc_asyncio", + request_type=azure_service.GenerateAzureClusterAgentTokenRequest, +): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_azure_cluster_agent_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_service.GenerateAzureClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", + ) + ) + response = await client.generate_azure_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GenerateAzureClusterAgentTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, azure_service.GenerateAzureClusterAgentTokenResponse) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" + + +@pytest.mark.asyncio +async def test_generate_azure_cluster_agent_token_async_from_dict(): + await test_generate_azure_cluster_agent_token_async(request_type=dict) + + +def test_generate_azure_cluster_agent_token_field_headers(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = azure_service.GenerateAzureClusterAgentTokenRequest() + + request.azure_cluster = "azure_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_azure_cluster_agent_token), "__call__" + ) as call: + call.return_value = azure_service.GenerateAzureClusterAgentTokenResponse() + client.generate_azure_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "azure_cluster=azure_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_azure_cluster_agent_token_field_headers_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = azure_service.GenerateAzureClusterAgentTokenRequest() + + request.azure_cluster = "azure_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_azure_cluster_agent_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_service.GenerateAzureClusterAgentTokenResponse() + ) + await client.generate_azure_cluster_agent_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "azure_cluster=azure_cluster_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -5034,11 +5203,11 @@ async def test_delete_azure_node_pool_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - azure_service.GetAzureServerConfigRequest, + azure_service.GetAzureOpenIdConfigRequest, dict, ], ) -def test_get_azure_server_config(request_type, transport: str = "grpc"): +def test_get_azure_open_id_config(request_type, transport: str = "grpc"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5050,27 +5219,41 @@ def test_get_azure_server_config(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = azure_resources.AzureServerConfig( - name="name_value", - supported_azure_regions=["supported_azure_regions_value"], + call.return_value = azure_resources.AzureOpenIdConfig( + issuer="issuer_value", + jwks_uri="jwks_uri_value", + response_types_supported=["response_types_supported_value"], + subject_types_supported=["subject_types_supported_value"], + id_token_signing_alg_values_supported=[ + "id_token_signing_alg_values_supported_value" + ], + claims_supported=["claims_supported_value"], + grant_types=["grant_types_value"], ) - response = client.get_azure_server_config(request) + response = client.get_azure_open_id_config(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == azure_service.GetAzureServerConfigRequest() + assert args[0] == azure_service.GetAzureOpenIdConfigRequest() # Establish that the response is the type that we expect. - assert isinstance(response, azure_resources.AzureServerConfig) - assert response.name == "name_value" - assert response.supported_azure_regions == ["supported_azure_regions_value"] + assert isinstance(response, azure_resources.AzureOpenIdConfig) + assert response.issuer == "issuer_value" + assert response.jwks_uri == "jwks_uri_value" + assert response.response_types_supported == ["response_types_supported_value"] + assert response.subject_types_supported == ["subject_types_supported_value"] + assert response.id_token_signing_alg_values_supported == [ + "id_token_signing_alg_values_supported_value" + ] + assert response.claims_supported == ["claims_supported_value"] + assert response.grant_types == ["grant_types_value"] -def test_get_azure_server_config_empty_call(): +def test_get_azure_open_id_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AzureClustersClient( @@ -5080,18 +5263,18 @@ def test_get_azure_server_config_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: - client.get_azure_server_config() + client.get_azure_open_id_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == azure_service.GetAzureServerConfigRequest() + assert args[0] == azure_service.GetAzureOpenIdConfigRequest() @pytest.mark.asyncio -async def test_get_azure_server_config_async( +async def test_get_azure_open_id_config_async( transport: str = "grpc_asyncio", - request_type=azure_service.GetAzureServerConfigRequest, + request_type=azure_service.GetAzureOpenIdConfigRequest, ): client = AzureClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5104,50 +5287,64 @@ async def test_get_azure_server_config_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - azure_resources.AzureServerConfig( - name="name_value", - supported_azure_regions=["supported_azure_regions_value"], + azure_resources.AzureOpenIdConfig( + issuer="issuer_value", + jwks_uri="jwks_uri_value", + response_types_supported=["response_types_supported_value"], + subject_types_supported=["subject_types_supported_value"], + id_token_signing_alg_values_supported=[ + "id_token_signing_alg_values_supported_value" + ], + claims_supported=["claims_supported_value"], + grant_types=["grant_types_value"], ) ) - response = await client.get_azure_server_config(request) + response = await client.get_azure_open_id_config(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == azure_service.GetAzureServerConfigRequest() + assert args[0] == azure_service.GetAzureOpenIdConfigRequest() # Establish that the response is the type that we expect. - assert isinstance(response, azure_resources.AzureServerConfig) - assert response.name == "name_value" - assert response.supported_azure_regions == ["supported_azure_regions_value"] + assert isinstance(response, azure_resources.AzureOpenIdConfig) + assert response.issuer == "issuer_value" + assert response.jwks_uri == "jwks_uri_value" + assert response.response_types_supported == ["response_types_supported_value"] + assert response.subject_types_supported == ["subject_types_supported_value"] + assert response.id_token_signing_alg_values_supported == [ + "id_token_signing_alg_values_supported_value" + ] + assert response.claims_supported == ["claims_supported_value"] + assert response.grant_types == ["grant_types_value"] @pytest.mark.asyncio -async def test_get_azure_server_config_async_from_dict(): - await test_get_azure_server_config_async(request_type=dict) +async def test_get_azure_open_id_config_async_from_dict(): + await test_get_azure_open_id_config_async(request_type=dict) -def test_get_azure_server_config_field_headers(): +def test_get_azure_open_id_config_field_headers(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = azure_service.GetAzureServerConfigRequest() + request = azure_service.GetAzureOpenIdConfigRequest() - request.name = "name_value" + request.azure_cluster = "azure_cluster_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: - call.return_value = azure_resources.AzureServerConfig() - client.get_azure_server_config(request) + call.return_value = azure_resources.AzureOpenIdConfig() + client.get_azure_open_id_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5158,30 +5355,30 @@ def test_get_azure_server_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "azure_cluster=azure_cluster_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_azure_server_config_field_headers_async(): +async def test_get_azure_open_id_config_field_headers_async(): client = AzureClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = azure_service.GetAzureServerConfigRequest() + request = azure_service.GetAzureOpenIdConfigRequest() - request.name = "name_value" + request.azure_cluster = "azure_cluster_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - azure_resources.AzureServerConfig() + azure_resources.AzureOpenIdConfig() ) - await client.get_azure_server_config(request) + await client.get_azure_open_id_config(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5192,37 +5389,37 @@ async def test_get_azure_server_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "azure_cluster=azure_cluster_value", ) in kw["metadata"] -def test_get_azure_server_config_flattened(): +def test_get_azure_open_id_config_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = azure_resources.AzureServerConfig() + call.return_value = azure_resources.AzureOpenIdConfig() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_azure_server_config( - name="name_value", + client.get_azure_open_id_config( + azure_cluster="azure_cluster_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].azure_cluster + mock_val = "azure_cluster_value" assert arg == mock_val -def test_get_azure_server_config_flattened_error(): +def test_get_azure_open_id_config_flattened_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5230,45 +5427,45 @@ def test_get_azure_server_config_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_azure_server_config( - azure_service.GetAzureServerConfigRequest(), - name="name_value", + client.get_azure_open_id_config( + azure_service.GetAzureOpenIdConfigRequest(), + azure_cluster="azure_cluster_value", ) @pytest.mark.asyncio -async def test_get_azure_server_config_flattened_async(): +async def test_get_azure_open_id_config_flattened_async(): client = AzureClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_azure_server_config), "__call__" + type(client.transport.get_azure_open_id_config), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = azure_resources.AzureServerConfig() + call.return_value = azure_resources.AzureOpenIdConfig() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - azure_resources.AzureServerConfig() + azure_resources.AzureOpenIdConfig() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_azure_server_config( - name="name_value", + response = await client.get_azure_open_id_config( + azure_cluster="azure_cluster_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].azure_cluster + mock_val = "azure_cluster_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_azure_server_config_flattened_error_async(): +async def test_get_azure_open_id_config_flattened_error_async(): client = AzureClustersAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5276,105 +5473,1524 @@ async def test_get_azure_server_config_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_azure_server_config( - azure_service.GetAzureServerConfigRequest(), - name="name_value", + await client.get_azure_open_id_config( + azure_service.GetAzureOpenIdConfigRequest(), + azure_cluster="azure_cluster_value", ) @pytest.mark.parametrize( "request_type", [ - azure_service.CreateAzureClientRequest, + azure_service.GetAzureJsonWebKeysRequest, dict, ], ) -def test_create_azure_client_rest(request_type): +def test_get_azure_json_web_keys(request_type, transport: str = "grpc"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["azure_client"] = { - "name": "name_value", - "tenant_id": "tenant_id_value", - "application_id": "application_id_value", - "reconciling": True, - "annotations": {}, - "pem_certificate": "pem_certificate_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Determine if the message type is proto-plus or protobuf - test_field = azure_service.CreateAzureClientRequest.meta.fields["azure_client"] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = azure_resources.AzureJsonWebKeys() + response = client.get_azure_json_web_keys(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GetAzureJsonWebKeysRequest() - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. + assert isinstance(response, azure_resources.AzureJsonWebKeys) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) +def test_get_azure_json_web_keys_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + client.get_azure_json_web_keys() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GetAzureJsonWebKeysRequest() + + +@pytest.mark.asyncio +async def test_get_azure_json_web_keys_async( + transport: str = "grpc_asyncio", + request_type=azure_service.GetAzureJsonWebKeysRequest, +): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_resources.AzureJsonWebKeys() + ) + response = await client.get_azure_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GetAzureJsonWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, azure_resources.AzureJsonWebKeys) + + +@pytest.mark.asyncio +async def test_get_azure_json_web_keys_async_from_dict(): + await test_get_azure_json_web_keys_async(request_type=dict) + + +def test_get_azure_json_web_keys_field_headers(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = azure_service.GetAzureJsonWebKeysRequest() + + request.azure_cluster = "azure_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + call.return_value = azure_resources.AzureJsonWebKeys() + client.get_azure_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "azure_cluster=azure_cluster_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_azure_json_web_keys_field_headers_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = azure_service.GetAzureJsonWebKeysRequest() + + request.azure_cluster = "azure_cluster_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_resources.AzureJsonWebKeys() + ) + await client.get_azure_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "azure_cluster=azure_cluster_value", + ) in kw["metadata"] + + +def test_get_azure_json_web_keys_flattened(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = azure_resources.AzureJsonWebKeys() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_azure_json_web_keys( + azure_cluster="azure_cluster_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].azure_cluster + mock_val = "azure_cluster_value" + assert arg == mock_val + + +def test_get_azure_json_web_keys_flattened_error(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_azure_json_web_keys( + azure_service.GetAzureJsonWebKeysRequest(), + azure_cluster="azure_cluster_value", + ) + + +@pytest.mark.asyncio +async def test_get_azure_json_web_keys_flattened_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = azure_resources.AzureJsonWebKeys() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_resources.AzureJsonWebKeys() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_azure_json_web_keys( + azure_cluster="azure_cluster_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].azure_cluster + mock_val = "azure_cluster_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_azure_json_web_keys_flattened_error_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_azure_json_web_keys( + azure_service.GetAzureJsonWebKeysRequest(), + azure_cluster="azure_cluster_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + azure_service.GetAzureServerConfigRequest, + dict, + ], +) +def test_get_azure_server_config(request_type, transport: str = "grpc"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = azure_resources.AzureServerConfig( + name="name_value", + supported_azure_regions=["supported_azure_regions_value"], + ) + response = client.get_azure_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GetAzureServerConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, azure_resources.AzureServerConfig) + assert response.name == "name_value" + assert response.supported_azure_regions == ["supported_azure_regions_value"] + + +def test_get_azure_server_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + client.get_azure_server_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GetAzureServerConfigRequest() + + +@pytest.mark.asyncio +async def test_get_azure_server_config_async( + transport: str = "grpc_asyncio", + request_type=azure_service.GetAzureServerConfigRequest, +): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_resources.AzureServerConfig( + name="name_value", + supported_azure_regions=["supported_azure_regions_value"], + ) + ) + response = await client.get_azure_server_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == azure_service.GetAzureServerConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, azure_resources.AzureServerConfig) + assert response.name == "name_value" + assert response.supported_azure_regions == ["supported_azure_regions_value"] + + +@pytest.mark.asyncio +async def test_get_azure_server_config_async_from_dict(): + await test_get_azure_server_config_async(request_type=dict) + + +def test_get_azure_server_config_field_headers(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = azure_service.GetAzureServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + call.return_value = azure_resources.AzureServerConfig() + client.get_azure_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_azure_server_config_field_headers_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = azure_service.GetAzureServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_resources.AzureServerConfig() + ) + await client.get_azure_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_azure_server_config_flattened(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = azure_resources.AzureServerConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_azure_server_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_azure_server_config_flattened_error(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_azure_server_config( + azure_service.GetAzureServerConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_azure_server_config_flattened_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_azure_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = azure_resources.AzureServerConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + azure_resources.AzureServerConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_azure_server_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_azure_server_config_flattened_error_async(): + client = AzureClustersAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_azure_server_config( + azure_service.GetAzureServerConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + azure_service.CreateAzureClientRequest, + dict, + ], +) +def test_create_azure_client_rest(request_type): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["azure_client"] = { + "name": "name_value", + "tenant_id": "tenant_id_value", + "application_id": "application_id_value", + "reconciling": True, + "annotations": {}, + "pem_certificate": "pem_certificate_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = azure_service.CreateAzureClientRequest.meta.fields["azure_client"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] - subfields_not_in_runtime = [] + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["azure_client"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["azure_client"][field])): + del 
request_init["azure_client"][field][i][subfield] + else: + del request_init["azure_client"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_azure_client(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_azure_client_rest_required_fields( + request_type=azure_service.CreateAzureClientRequest, +): + transport_class = transports.AzureClustersRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["azure_client_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "azureClientId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_azure_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "azureClientId" in jsonified_request + assert jsonified_request["azureClientId"] == request_init["azure_client_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["azureClientId"] = "azure_client_id_value" + + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_azure_client._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "azure_client_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "azureClientId" in jsonified_request + assert jsonified_request["azureClientId"] == "azure_client_id_value" + + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_azure_client(request) + + expected_params = [ + ( + "azureClientId", + "", + ), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_azure_client_rest_unset_required_fields(): + transport = transports.AzureClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_azure_client._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "azureClientId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "azureClient", + "azureClientId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_azure_client_rest_interceptors(null_interceptor): + transport = transports.AzureClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AzureClustersRestInterceptor(), + ) + client = AzureClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AzureClustersRestInterceptor, "post_create_azure_client" + ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, "pre_create_azure_client" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = azure_service.CreateAzureClientRequest.pb( + 
azure_service.CreateAzureClientRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = azure_service.CreateAzureClientRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_azure_client( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_azure_client_rest_bad_request( + transport: str = "rest", request_type=azure_service.CreateAzureClientRequest +): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_azure_client(request) + + +def test_create_azure_client_rest_flattened(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + azure_client=azure_resources.AzureClient(name="name_value"), + azure_client_id="azure_client_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_azure_client(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/azureClients" + % client.transport._host, + args[1], + ) + + +def test_create_azure_client_rest_flattened_error(transport: str = "rest"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_azure_client( + azure_service.CreateAzureClientRequest(), + parent="parent_value", + azure_client=azure_resources.AzureClient(name="name_value"), + azure_client_id="azure_client_id_value", + ) + + +def test_create_azure_client_rest_error(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + azure_service.GetAzureClientRequest, + dict, + ], +) +def test_get_azure_client_rest(request_type): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = azure_resources.AzureClient( + name="name_value", + tenant_id="tenant_id_value", + application_id="application_id_value", + reconciling=True, + pem_certificate="pem_certificate_value", + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureClient.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_azure_client(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, azure_resources.AzureClient) + assert response.name == "name_value" + assert response.tenant_id == "tenant_id_value" + assert response.application_id == "application_id_value" + assert response.reconciling is True + assert response.pem_certificate == "pem_certificate_value" + assert response.uid == "uid_value" + + +def test_get_azure_client_rest_required_fields( + request_type=azure_service.GetAzureClientRequest, +): + transport_class = transports.AzureClustersRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_azure_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_azure_client._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = azure_resources.AzureClient() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_resources.AzureClient.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_azure_client(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_azure_client_rest_unset_required_fields(): + transport = transports.AzureClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_azure_client._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_azure_client_rest_interceptors(null_interceptor): + transport = transports.AzureClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AzureClustersRestInterceptor(), + ) + client = AzureClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AzureClustersRestInterceptor, 
"post_get_azure_client" + ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, "pre_get_azure_client" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = azure_service.GetAzureClientRequest.pb( + azure_service.GetAzureClientRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = azure_resources.AzureClient.to_json( + azure_resources.AzureClient() + ) + + request = azure_service.GetAzureClientRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = azure_resources.AzureClient() + + client.get_azure_client( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_azure_client_rest_bad_request( + transport: str = "rest", request_type=azure_service.GetAzureClientRequest +): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_azure_client(request) + + +def test_get_azure_client_rest_flattened(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = azure_resources.AzureClient() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/azureClients/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureClient.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_azure_client(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/azureClients/*}" + % client.transport._host, + args[1], + ) + + +def test_get_azure_client_rest_flattened_error(transport: str = "rest"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_azure_client( + azure_service.GetAzureClientRequest(), + name="name_value", + ) + + +def test_get_azure_client_rest_error(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + azure_service.ListAzureClientsRequest, + dict, + ], +) +def test_list_azure_clients_rest(request_type): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = azure_service.ListAzureClientsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_service.ListAzureClientsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_azure_clients(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAzureClientsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_azure_clients_rest_required_fields( + request_type=azure_service.ListAzureClientsRequest, +): + transport_class = transports.AzureClustersRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_azure_clients._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_azure_clients._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = azure_service.ListAzureClientsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_service.ListAzureClientsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_azure_clients(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_azure_clients_rest_unset_required_fields(): + transport = transports.AzureClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_azure_clients._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_azure_clients_rest_interceptors(null_interceptor): + transport = transports.AzureClustersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AzureClustersRestInterceptor(), + ) + client = AzureClustersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AzureClustersRestInterceptor, "post_list_azure_clients" + ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, "pre_list_azure_clients" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = azure_service.ListAzureClientsRequest.pb( + azure_service.ListAzureClientsRequest() + ) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = azure_service.ListAzureClientsResponse.to_json( + azure_service.ListAzureClientsResponse() + ) + + request = azure_service.ListAzureClientsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = azure_service.ListAzureClientsResponse() - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["azure_client"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.list_azure_clients( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + pre.assert_called_once() + post.assert_called_once() - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["azure_client"][field])): - del request_init["azure_client"][field][i][subfield] - else: - del request_init["azure_client"][field][subfield] + +def test_list_azure_clients_rest_bad_request( + transport: str = "rest", request_type=azure_service.ListAzureClientsRequest +): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_azure_clients(request) + + +def test_list_azure_clients_rest_flattened(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = azure_service.ListAzureClientsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_service.ListAzureClientsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_azure_clients(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/azureClients" + % client.transport._host, + args[1], + ) + + +def test_list_azure_clients_rest_flattened_error(transport: str = "rest"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_azure_clients( + azure_service.ListAzureClientsRequest(), + parent="parent_value", + ) + + +def test_list_azure_clients_rest_pager(transport: str = "rest"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + azure_service.ListAzureClientsResponse( + azure_clients=[ + azure_resources.AzureClient(), + azure_resources.AzureClient(), + azure_resources.AzureClient(), + ], + next_page_token="abc", + ), + azure_service.ListAzureClientsResponse( + azure_clients=[], + next_page_token="def", + ), + azure_service.ListAzureClientsResponse( + azure_clients=[ + azure_resources.AzureClient(), + ], + next_page_token="ghi", + ), + azure_service.ListAzureClientsResponse( + azure_clients=[ + azure_resources.AzureClient(), + azure_resources.AzureClient(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + azure_service.ListAzureClientsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_azure_clients(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, azure_resources.AzureClient) for i in results) + + pages = list(client.list_azure_clients(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + azure_service.DeleteAzureClientRequest, + dict, + ], +) +def test_delete_azure_client_rest(request_type): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5389,20 +7005,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_azure_client(request) + response = client.delete_azure_client(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_azure_client_rest_required_fields( - request_type=azure_service.CreateAzureClientRequest, +def test_delete_azure_client_rest_required_fields( + request_type=azure_service.DeleteAzureClientRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["parent"] = "" - request_init["azure_client_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5414,37 +7029,31 @@ def test_create_azure_client_rest_required_fields( ) # verify fields with default values are dropped - assert "azureClientId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_azure_client._get_unset_required_fields(jsonified_request) + ).delete_azure_client._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "azureClientId" in jsonified_request - assert jsonified_request["azureClientId"] == request_init["azure_client_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["azureClientId"] = "azure_client_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_azure_client._get_unset_required_fields(jsonified_request) + ).delete_azure_client._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters 
are not mixing in. assert not set(unset_fields) - set( ( - "azure_client_id", + "allow_missing", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "azureClientId" in jsonified_request - assert jsonified_request["azureClientId"] == "azure_client_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5465,10 +7074,9 @@ def test_create_azure_client_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -5478,43 +7086,32 @@ def test_create_azure_client_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_azure_client(request) + response = client.delete_azure_client(request) - expected_params = [ - ( - "azureClientId", - "", - ), - ] + expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_azure_client_rest_unset_required_fields(): +def test_delete_azure_client_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_azure_client._get_unset_required_fields({}) + unset_fields = transport.delete_azure_client._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "azureClientId", + "allowMissing", "validateOnly", ) ) - & set( - ( - "parent", - "azureClient", - "azureClientId", - ) - ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, 
False]) -def test_create_azure_client_rest_interceptors(null_interceptor): +def test_delete_azure_client_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5529,14 +7126,14 @@ def test_create_azure_client_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_create_azure_client" + transports.AzureClustersRestInterceptor, "post_delete_azure_client" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_create_azure_client" + transports.AzureClustersRestInterceptor, "pre_delete_azure_client" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.CreateAzureClientRequest.pb( - azure_service.CreateAzureClientRequest() + pb_message = azure_service.DeleteAzureClientRequest.pb( + azure_service.DeleteAzureClientRequest() ) transcode.return_value = { "method": "post", @@ -5552,7 +7149,7 @@ def test_create_azure_client_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = azure_service.CreateAzureClientRequest() + request = azure_service.DeleteAzureClientRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5560,7 +7157,7 @@ def test_create_azure_client_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_azure_client( + client.delete_azure_client( request, metadata=[ ("key", "val"), @@ -5572,8 +7169,8 @@ def test_create_azure_client_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_azure_client_rest_bad_request( - transport: str = "rest", request_type=azure_service.CreateAzureClientRequest +def test_delete_azure_client_rest_bad_request( + transport: str = "rest", request_type=azure_service.DeleteAzureClientRequest ): client = 
AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5581,7 +7178,7 @@ def test_create_azure_client_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5593,10 +7190,10 @@ def test_create_azure_client_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_azure_client(request) + client.delete_azure_client(request) -def test_create_azure_client_rest_flattened(): +def test_delete_azure_client_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5608,13 +7205,13 @@ def test_create_azure_client_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/azureClients/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - azure_client=azure_resources.AzureClient(name="name_value"), - azure_client_id="azure_client_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -5625,20 +7222,20 @@ def test_create_azure_client_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_azure_client(**mock_args) + client.delete_azure_client(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/azureClients" + "%s/v1/{name=projects/*/locations/*/azureClients/*}" % client.transport._host, args[1], ) -def test_create_azure_client_rest_flattened_error(transport: str = "rest"): +def test_delete_azure_client_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5647,15 +7244,13 @@ def test_create_azure_client_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_azure_client( - azure_service.CreateAzureClientRequest(), - parent="parent_value", - azure_client=azure_resources.AzureClient(name="name_value"), - azure_client_id="azure_client_id_value", + client.delete_azure_client( + azure_service.DeleteAzureClientRequest(), + name="name_value", ) -def test_create_azure_client_rest_error(): +def test_delete_azure_client_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5664,60 +7259,187 @@ def test_create_azure_client_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.GetAzureClientRequest, + azure_service.CreateAzureClusterRequest, dict, ], ) -def test_get_azure_client_rest(request_type): +def test_create_azure_cluster_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["azure_cluster"] = { + "name": "name_value", + "description": "description_value", + "azure_region": "azure_region_value", + "resource_group_id": 
"resource_group_id_value", + "azure_client": "azure_client_value", + "networking": { + "virtual_network_id": "virtual_network_id_value", + "pod_address_cidr_blocks": [ + "pod_address_cidr_blocks_value1", + "pod_address_cidr_blocks_value2", + ], + "service_address_cidr_blocks": [ + "service_address_cidr_blocks_value1", + "service_address_cidr_blocks_value2", + ], + "service_load_balancer_subnet_id": "service_load_balancer_subnet_id_value", + }, + "control_plane": { + "version": "version_value", + "subnet_id": "subnet_id_value", + "vm_size": "vm_size_value", + "ssh_config": {"authorized_key": "authorized_key_value"}, + "root_volume": {"size_gib": 844}, + "main_volume": {}, + "database_encryption": {"key_id": "key_id_value"}, + "proxy_config": { + "resource_group_id": "resource_group_id_value", + "secret_id": "secret_id_value", + }, + "config_encryption": { + "key_id": "key_id_value", + "public_key": "public_key_value", + }, + "tags": {}, + "replica_placements": [ + { + "subnet_id": "subnet_id_value", + "azure_availability_zone": "azure_availability_zone_value", + } + ], + "endpoint_subnet_id": "endpoint_subnet_id_value", + }, + "authorization": { + "admin_users": [{"username": "username_value"}], + "admin_groups": [{"group": "group_value"}], + }, + "azure_services_authentication": { + "tenant_id": "tenant_id_value", + "application_id": "application_id_value", + }, + "state": 1, + "endpoint": "endpoint_value", + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "workload_identity_config": { + "issuer_uri": "issuer_uri_value", + "workload_pool": "workload_pool_value", + "identity_provider": "identity_provider_value", + }, + "cluster_ca_certificate": "cluster_ca_certificate_value", + "fleet": {"project": "project_value", "membership": "membership_value"}, + "managed_resources": { + "network_security_group_id": "network_security_group_id_value", + 
"control_plane_application_security_group_id": "control_plane_application_security_group_id_value", + }, + "logging_config": {"component_config": {"enable_components": [1]}}, + "errors": [{"message": "message_value"}], + "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = azure_service.CreateAzureClusterRequest.meta.fields["azure_cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["azure_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is 
another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["azure_cluster"][field])): + del request_init["azure_cluster"][field][i][subfield] + else: + del request_init["azure_cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = azure_resources.AzureClient( - name="name_value", - tenant_id="tenant_id_value", - application_id="application_id_value", - reconciling=True, - pem_certificate="pem_certificate_value", - uid="uid_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_resources.AzureClient.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_azure_client(request) + response = client.create_azure_cluster(request) # Establish that the response is the type that we expect. - assert isinstance(response, azure_resources.AzureClient) - assert response.name == "name_value" - assert response.tenant_id == "tenant_id_value" - assert response.application_id == "application_id_value" - assert response.reconciling is True - assert response.pem_certificate == "pem_certificate_value" - assert response.uid == "uid_value" + assert response.operation.name == "operations/spam" -def test_get_azure_client_rest_required_fields( - request_type=azure_service.GetAzureClientRequest, +def test_create_azure_cluster_rest_required_fields( + request_type=azure_service.CreateAzureClusterRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["azure_cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5729,24 +7451,37 @@ def test_get_azure_client_rest_required_fields( ) # verify fields with default values are dropped + assert "azureClusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).get_azure_client._get_unset_required_fields(jsonified_request) + ).create_azure_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "azureClusterId" in jsonified_request + assert jsonified_request["azureClusterId"] == request_init["azure_cluster_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["azureClusterId"] = "azure_cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_azure_client._get_unset_required_fields(jsonified_request) + ).create_azure_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "azure_cluster_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "azureClusterId" in jsonified_request + assert jsonified_request["azureClusterId"] == "azure_cluster_id_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5755,7 +7490,7 @@ def test_get_azure_client_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = azure_resources.AzureClient() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5767,39 +7502,56 @@ def test_get_azure_client_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = azure_resources.AzureClient.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_azure_client(request) + response = client.create_azure_cluster(request) - expected_params = [] + expected_params = [ + ( + "azureClusterId", + "", + ), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_azure_client_rest_unset_required_fields(): +def test_create_azure_cluster_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_azure_client._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_azure_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "azureClusterId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "azureCluster", + "azureClusterId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_azure_client_rest_interceptors(null_interceptor): +def test_create_azure_cluster_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5812,14 +7564,16 @@ def 
test_get_azure_client_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_get_azure_client" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AzureClustersRestInterceptor, "post_create_azure_cluster" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_get_azure_client" + transports.AzureClustersRestInterceptor, "pre_create_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.GetAzureClientRequest.pb( - azure_service.GetAzureClientRequest() + pb_message = azure_service.CreateAzureClusterRequest.pb( + azure_service.CreateAzureClusterRequest() ) transcode.return_value = { "method": "post", @@ -5831,19 +7585,19 @@ def test_get_azure_client_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = azure_resources.AzureClient.to_json( - azure_resources.AzureClient() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = azure_service.GetAzureClientRequest() + request = azure_service.CreateAzureClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = azure_resources.AzureClient() + post.return_value = operations_pb2.Operation() - client.get_azure_client( + client.create_azure_cluster( request, metadata=[ ("key", "val"), @@ -5855,8 +7609,8 @@ def test_get_azure_client_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_azure_client_rest_bad_request( - transport: str = "rest", request_type=azure_service.GetAzureClientRequest +def test_create_azure_cluster_rest_bad_request( + transport: str = "rest", request_type=azure_service.CreateAzureClusterRequest ): client = 
AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5864,7 +7618,7 @@ def test_get_azure_client_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5876,10 +7630,10 @@ def test_get_azure_client_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_azure_client(request) + client.create_azure_cluster(request) -def test_get_azure_client_rest_flattened(): +def test_create_azure_cluster_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5888,42 +7642,40 @@ def test_get_azure_client_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = azure_resources.AzureClient() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/azureClients/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + azure_cluster=azure_resources.AzureCluster(name="name_value"), + azure_cluster_id="azure_cluster_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_resources.AzureClient.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_azure_client(**mock_args) + client.create_azure_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/azureClients/*}" + "%s/v1/{parent=projects/*/locations/*}/azureClusters" % client.transport._host, args[1], ) -def test_get_azure_client_rest_flattened_error(transport: str = "rest"): +def test_create_azure_cluster_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5932,13 +7684,15 @@ def test_get_azure_client_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_azure_client( - azure_service.GetAzureClientRequest(), - name="name_value", + client.create_azure_cluster( + azure_service.CreateAzureClusterRequest(), + parent="parent_value", + azure_cluster=azure_resources.AzureCluster(name="name_value"), + azure_cluster_id="azure_cluster_id_value", ) -def test_get_azure_client_rest_error(): +def test_create_azure_cluster_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5947,50 +7701,189 @@ def test_get_azure_client_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.ListAzureClientsRequest, + azure_service.UpdateAzureClusterRequest, dict, ], ) -def test_list_azure_clients_rest(request_type): +def test_update_azure_cluster_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "azure_cluster": { + "name": "projects/sample1/locations/sample2/azureClusters/sample3" + } + } + request_init["azure_cluster"] = { + "name": "projects/sample1/locations/sample2/azureClusters/sample3", + "description": "description_value", + "azure_region": "azure_region_value", + "resource_group_id": "resource_group_id_value", + "azure_client": "azure_client_value", + "networking": { + "virtual_network_id": "virtual_network_id_value", + "pod_address_cidr_blocks": [ + "pod_address_cidr_blocks_value1", + "pod_address_cidr_blocks_value2", + ], + "service_address_cidr_blocks": [ + "service_address_cidr_blocks_value1", + "service_address_cidr_blocks_value2", + ], + "service_load_balancer_subnet_id": "service_load_balancer_subnet_id_value", + }, + "control_plane": { + "version": "version_value", + "subnet_id": "subnet_id_value", + "vm_size": "vm_size_value", + "ssh_config": {"authorized_key": "authorized_key_value"}, + 
"root_volume": {"size_gib": 844}, + "main_volume": {}, + "database_encryption": {"key_id": "key_id_value"}, + "proxy_config": { + "resource_group_id": "resource_group_id_value", + "secret_id": "secret_id_value", + }, + "config_encryption": { + "key_id": "key_id_value", + "public_key": "public_key_value", + }, + "tags": {}, + "replica_placements": [ + { + "subnet_id": "subnet_id_value", + "azure_availability_zone": "azure_availability_zone_value", + } + ], + "endpoint_subnet_id": "endpoint_subnet_id_value", + }, + "authorization": { + "admin_users": [{"username": "username_value"}], + "admin_groups": [{"group": "group_value"}], + }, + "azure_services_authentication": { + "tenant_id": "tenant_id_value", + "application_id": "application_id_value", + }, + "state": 1, + "endpoint": "endpoint_value", + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "workload_identity_config": { + "issuer_uri": "issuer_uri_value", + "workload_pool": "workload_pool_value", + "identity_provider": "identity_provider_value", + }, + "cluster_ca_certificate": "cluster_ca_certificate_value", + "fleet": {"project": "project_value", "membership": "membership_value"}, + "managed_resources": { + "network_security_group_id": "network_security_group_id_value", + "control_plane_application_security_group_id": "control_plane_application_security_group_id_value", + }, + "logging_config": {"component_config": {"enable_components": [1]}}, + "errors": [{"message": "message_value"}], + "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = azure_service.UpdateAzureClusterRequest.meta.fields["azure_cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["azure_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["azure_cluster"][field])): + del request_init["azure_cluster"][field][i][subfield] + else: + del request_init["azure_cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = azure_service.ListAzureClientsResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_service.ListAzureClientsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_azure_clients(request) + response = client.update_azure_cluster(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAzureClientsPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_azure_clients_rest_required_fields( - request_type=azure_service.ListAzureClientsRequest, +def test_update_azure_cluster_rest_required_fields( + request_type=azure_service.UpdateAzureClusterRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6005,28 +7898,24 @@ def test_list_azure_clients_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_azure_clients._get_unset_required_fields(jsonified_request) + ).update_azure_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_azure_clients._get_unset_required_fields(jsonified_request) + ).update_azure_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6035,7 +7924,7 @@ def test_list_azure_clients_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = azure_service.ListAzureClientsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6047,47 +7936,50 @@ def test_list_azure_clients_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = azure_service.ListAzureClientsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_azure_clients(request) + response = client.update_azure_cluster(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_azure_clients_rest_unset_required_fields(): +def test_update_azure_cluster_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_azure_clients._get_unset_required_fields({}) + unset_fields = transport.update_azure_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "azureCluster", + "updateMask", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_azure_clients_rest_interceptors(null_interceptor): +def test_update_azure_cluster_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6100,14 +7992,16 @@ def test_list_azure_clients_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_list_azure_clients" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AzureClustersRestInterceptor, "post_update_azure_cluster" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_list_azure_clients" + transports.AzureClustersRestInterceptor, "pre_update_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.ListAzureClientsRequest.pb( - azure_service.ListAzureClientsRequest() + pb_message = azure_service.UpdateAzureClusterRequest.pb( + azure_service.UpdateAzureClusterRequest() ) transcode.return_value = { "method": "post", @@ -6119,19 +8013,19 @@ def test_list_azure_clients_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = azure_service.ListAzureClientsResponse.to_json( - azure_service.ListAzureClientsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = azure_service.ListAzureClientsRequest() + request = azure_service.UpdateAzureClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = azure_service.ListAzureClientsResponse() + post.return_value = operations_pb2.Operation() - client.list_azure_clients( + client.update_azure_cluster( request, metadata=[ ("key", "val"), @@ -6143,8 +8037,8 @@ def test_list_azure_clients_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_azure_clients_rest_bad_request( - transport: str = "rest", request_type=azure_service.ListAzureClientsRequest +def 
test_update_azure_cluster_rest_bad_request( + transport: str = "rest", request_type=azure_service.UpdateAzureClusterRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6152,7 +8046,11 @@ def test_list_azure_clients_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "azure_cluster": { + "name": "projects/sample1/locations/sample2/azureClusters/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6164,10 +8062,10 @@ def test_list_azure_clients_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_azure_clients(request) + client.update_azure_cluster(request) -def test_list_azure_clients_rest_flattened(): +def test_update_azure_cluster_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6176,40 +8074,43 @@ def test_list_azure_clients_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = azure_service.ListAzureClientsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "azure_cluster": { + "name": "projects/sample1/locations/sample2/azureClusters/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + azure_cluster=azure_resources.AzureCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_service.ListAzureClientsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_azure_clients(**mock_args) + client.update_azure_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/azureClients" + "%s/v1/{azure_cluster.name=projects/*/locations/*/azureClusters/*}" % client.transport._host, args[1], ) -def test_list_azure_clients_rest_flattened_error(transport: str = "rest"): +def test_update_azure_cluster_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6218,112 +8119,81 @@ def test_list_azure_clients_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_azure_clients( - azure_service.ListAzureClientsRequest(), - parent="parent_value", + client.update_azure_cluster( + azure_service.UpdateAzureClusterRequest(), + azure_cluster=azure_resources.AzureCluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_azure_clients_rest_pager(transport: str = "rest"): +def test_update_azure_cluster_rest_error(): client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - azure_service.ListAzureClientsResponse( - azure_clients=[ - azure_resources.AzureClient(), - azure_resources.AzureClient(), - azure_resources.AzureClient(), - ], - next_page_token="abc", - ), - azure_service.ListAzureClientsResponse( - azure_clients=[], - next_page_token="def", - ), - azure_service.ListAzureClientsResponse( - azure_clients=[ - azure_resources.AzureClient(), - ], - next_page_token="ghi", - ), - azure_service.ListAzureClientsResponse( - azure_clients=[ - azure_resources.AzureClient(), - azure_resources.AzureClient(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - azure_service.ListAzureClientsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": 
"projects/sample1/locations/sample2"} - - pager = client.list_azure_clients(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, azure_resources.AzureClient) for i in results) - - pages = list(client.list_azure_clients(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - azure_service.DeleteAzureClientRequest, + azure_service.GetAzureClusterRequest, dict, ], ) -def test_delete_azure_client_rest(request_type): +def test_get_azure_cluster_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureCluster( + name="name_value", + description="description_value", + azure_region="azure_region_value", + resource_group_id="resource_group_id_value", + azure_client="azure_client_value", + state=azure_resources.AzureCluster.State.PROVISIONING, + endpoint="endpoint_value", + uid="uid_value", + reconciling=True, + etag="etag_value", + cluster_ca_certificate="cluster_ca_certificate_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_azure_client(request) + response = client.get_azure_cluster(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, azure_resources.AzureCluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.azure_region == "azure_region_value" + assert response.resource_group_id == "resource_group_id_value" + assert response.azure_client == "azure_client_value" + assert response.state == azure_resources.AzureCluster.State.PROVISIONING + assert response.endpoint == "endpoint_value" + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.cluster_ca_certificate == "cluster_ca_certificate_value" -def test_delete_azure_client_rest_required_fields( - request_type=azure_service.DeleteAzureClientRequest, +def test_get_azure_cluster_rest_required_fields( + request_type=azure_service.GetAzureClusterRequest, ): transport_class = transports.AzureClustersRestTransport @@ -6343,7 +8213,7 @@ def test_delete_azure_client_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_azure_client._get_unset_required_fields(jsonified_request) + ).get_azure_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6352,14 +8222,7 @@ def test_delete_azure_client_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_azure_client._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "validate_only", - ) - ) + ).get_azure_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6373,7 +8236,7 @@ def test_delete_azure_client_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureCluster() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6385,44 +8248,39 @@ def test_delete_azure_client_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_resources.AzureCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_azure_client(request) + response = client.get_azure_cluster(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_azure_client_rest_unset_required_fields(): +def test_get_azure_cluster_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_azure_client._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = 
transport.get_azure_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_azure_client_rest_interceptors(null_interceptor): +def test_get_azure_cluster_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6435,16 +8293,14 @@ def test_delete_azure_client_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_delete_azure_client" + transports.AzureClustersRestInterceptor, "post_get_azure_cluster" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_delete_azure_client" + transports.AzureClustersRestInterceptor, "pre_get_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.DeleteAzureClientRequest.pb( - azure_service.DeleteAzureClientRequest() + pb_message = azure_service.GetAzureClusterRequest.pb( + azure_service.GetAzureClusterRequest() ) transcode.return_value = { "method": "post", @@ -6455,20 +8311,20 @@ def test_delete_azure_client_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value.request = PreparedRequest() + req.return_value._content = azure_resources.AzureCluster.to_json( + azure_resources.AzureCluster() ) - request = azure_service.DeleteAzureClientRequest() + request = azure_service.GetAzureClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = 
azure_resources.AzureCluster() - client.delete_azure_client( + client.get_azure_cluster( request, metadata=[ ("key", "val"), @@ -6480,8 +8336,8 @@ def test_delete_azure_client_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_azure_client_rest_bad_request( - transport: str = "rest", request_type=azure_service.DeleteAzureClientRequest +def test_get_azure_cluster_rest_bad_request( + transport: str = "rest", request_type=azure_service.GetAzureClusterRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6489,7 +8345,7 @@ def test_delete_azure_client_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClients/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6501,10 +8357,10 @@ def test_delete_azure_client_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_azure_client(request) + client.get_azure_cluster(request) -def test_delete_azure_client_rest_flattened(): +def test_get_azure_cluster_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6513,11 +8369,11 @@ def test_delete_azure_client_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureCluster() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/azureClients/sample3" + "name": "projects/sample1/locations/sample2/azureClusters/sample3" } # get truthy value for each flattened field @@ -6529,24 +8385,26 @@ def test_delete_azure_client_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_azure_client(**mock_args) + client.get_azure_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/azureClients/*}" + "%s/v1/{name=projects/*/locations/*/azureClusters/*}" % client.transport._host, args[1], ) -def test_delete_azure_client_rest_flattened_error(transport: str = "rest"): +def test_get_azure_cluster_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6555,13 +8413,13 @@ def test_delete_azure_client_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_azure_client( - azure_service.DeleteAzureClientRequest(), + client.get_azure_cluster( + azure_service.GetAzureClusterRequest(), name="name_value", ) -def test_delete_azure_client_rest_error(): +def test_get_azure_cluster_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6570,11 +8428,11 @@ def test_delete_azure_client_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.CreateAzureClusterRequest, + azure_service.ListAzureClustersRequest, dict, ], ) -def test_create_azure_cluster_rest(request_type): +def test_list_azure_clusters_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6582,172 +8440,38 @@ def test_create_azure_cluster_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["azure_cluster"] = { - "name": "name_value", - "description": "description_value", - "azure_region": "azure_region_value", - "resource_group_id": "resource_group_id_value", - "azure_client": "azure_client_value", - "networking": { - "virtual_network_id": "virtual_network_id_value", - "pod_address_cidr_blocks": [ - "pod_address_cidr_blocks_value1", - "pod_address_cidr_blocks_value2", - ], - "service_address_cidr_blocks": [ - "service_address_cidr_blocks_value1", - "service_address_cidr_blocks_value2", - ], - "service_load_balancer_subnet_id": "service_load_balancer_subnet_id_value", - }, - "control_plane": { - "version": "version_value", - "subnet_id": "subnet_id_value", - "vm_size": "vm_size_value", - "ssh_config": {"authorized_key": "authorized_key_value"}, - "root_volume": {"size_gib": 844}, - "main_volume": {}, - "database_encryption": {"key_id": "key_id_value"}, - "proxy_config": { - "resource_group_id": "resource_group_id_value", - "secret_id": "secret_id_value", - }, - 
"config_encryption": { - "key_id": "key_id_value", - "public_key": "public_key_value", - }, - "tags": {}, - "replica_placements": [ - { - "subnet_id": "subnet_id_value", - "azure_availability_zone": "azure_availability_zone_value", - } - ], - "endpoint_subnet_id": "endpoint_subnet_id_value", - }, - "authorization": {"admin_users": [{"username": "username_value"}]}, - "azure_services_authentication": { - "tenant_id": "tenant_id_value", - "application_id": "application_id_value", - }, - "state": 1, - "endpoint": "endpoint_value", - "uid": "uid_value", - "reconciling": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "workload_identity_config": { - "issuer_uri": "issuer_uri_value", - "workload_pool": "workload_pool_value", - "identity_provider": "identity_provider_value", - }, - "cluster_ca_certificate": "cluster_ca_certificate_value", - "fleet": {"project": "project_value", "membership": "membership_value"}, - "managed_resources": { - "network_security_group_id": "network_security_group_id_value", - "control_plane_application_security_group_id": "control_plane_application_security_group_id_value", - }, - "logging_config": {"component_config": {"enable_components": [1]}}, - "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = azure_service.CreateAzureClusterRequest.meta.fields["azure_cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["azure_cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["azure_cluster"][field])): - del request_init["azure_cluster"][field][i][subfield] - else: - del 
request_init["azure_cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_service.ListAzureClustersResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_service.ListAzureClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_azure_cluster(request) + response = client.list_azure_clusters(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListAzureClustersPager) + assert response.next_page_token == "next_page_token_value" -def test_create_azure_cluster_rest_required_fields( - request_type=azure_service.CreateAzureClusterRequest, +def test_list_azure_clusters_rest_required_fields( + request_type=azure_service.ListAzureClustersRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} request_init["parent"] = "" - request_init["azure_cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6759,28 +8483,24 @@ def test_create_azure_cluster_rest_required_fields( ) # verify fields with default values are dropped - assert "azureClusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_azure_cluster._get_unset_required_fields(jsonified_request) + 
).list_azure_clusters._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "azureClusterId" in jsonified_request - assert jsonified_request["azureClusterId"] == request_init["azure_cluster_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["azureClusterId"] = "azure_cluster_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_azure_cluster._get_unset_required_fields(jsonified_request) + ).list_azure_clusters._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "azure_cluster_id", - "validate_only", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -6788,8 +8508,6 @@ def test_create_azure_cluster_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "azureClusterId" in jsonified_request - assert jsonified_request["azureClusterId"] == "azure_cluster_id_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6798,7 +8516,7 @@ def test_create_azure_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_service.ListAzureClustersResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6810,56 +8528,47 @@ def test_create_azure_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_service.ListAzureClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_azure_cluster(request) + response = client.list_azure_clusters(request) - expected_params = [ - ( - "azureClusterId", - "", - ), - ] + expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_azure_cluster_rest_unset_required_fields(): +def test_list_azure_clusters_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_azure_cluster._get_unset_required_fields({}) + unset_fields = transport.list_azure_clusters._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "azureClusterId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "azureCluster", - "azureClusterId", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_azure_cluster_rest_interceptors(null_interceptor): +def test_list_azure_clusters_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6872,16 +8581,14 @@ def 
test_create_azure_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_create_azure_cluster" + transports.AzureClustersRestInterceptor, "post_list_azure_clusters" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_create_azure_cluster" + transports.AzureClustersRestInterceptor, "pre_list_azure_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.CreateAzureClusterRequest.pb( - azure_service.CreateAzureClusterRequest() + pb_message = azure_service.ListAzureClustersRequest.pb( + azure_service.ListAzureClustersRequest() ) transcode.return_value = { "method": "post", @@ -6893,19 +8600,19 @@ def test_create_azure_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = azure_service.ListAzureClustersResponse.to_json( + azure_service.ListAzureClustersResponse() ) - request = azure_service.CreateAzureClusterRequest() + request = azure_service.ListAzureClustersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = azure_service.ListAzureClustersResponse() - client.create_azure_cluster( + client.list_azure_clusters( request, metadata=[ ("key", "val"), @@ -6917,8 +8624,8 @@ def test_create_azure_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_azure_cluster_rest_bad_request( - transport: str = "rest", request_type=azure_service.CreateAzureClusterRequest +def test_list_azure_clusters_rest_bad_request( + transport: str = "rest", 
request_type=azure_service.ListAzureClustersRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6938,10 +8645,10 @@ def test_create_azure_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_azure_cluster(request) + client.list_azure_clusters(request) -def test_create_azure_cluster_rest_flattened(): +def test_list_azure_clusters_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6950,7 +8657,7 @@ def test_create_azure_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_service.ListAzureClustersResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -6958,19 +8665,19 @@ def test_create_azure_cluster_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - azure_cluster=azure_resources.AzureCluster(name="name_value"), - azure_cluster_id="azure_cluster_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_service.ListAzureClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_azure_cluster(**mock_args) + client.list_azure_clusters(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -6983,7 +8690,7 @@ def test_create_azure_cluster_rest_flattened(): ) -def test_create_azure_cluster_rest_flattened_error(transport: str = "rest"): +def test_list_azure_clusters_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6992,177 +8699,90 @@ def test_create_azure_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_azure_cluster( - azure_service.CreateAzureClusterRequest(), + client.list_azure_clusters( + azure_service.ListAzureClustersRequest(), parent="parent_value", - azure_cluster=azure_resources.AzureCluster(name="name_value"), - azure_cluster_id="azure_cluster_id_value", ) -def test_create_azure_cluster_rest_error(): +def test_list_azure_clusters_rest_pager(transport: str = "rest"): client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + azure_service.ListAzureClustersResponse( + azure_clusters=[ + azure_resources.AzureCluster(), + azure_resources.AzureCluster(), + azure_resources.AzureCluster(), + ], + next_page_token="abc", + ), + azure_service.ListAzureClustersResponse( + azure_clusters=[], + next_page_token="def", + ), + azure_service.ListAzureClustersResponse( + azure_clusters=[ + azure_resources.AzureCluster(), + ], + next_page_token="ghi", + ), + azure_service.ListAzureClustersResponse( + azure_clusters=[ + azure_resources.AzureCluster(), + azure_resources.AzureCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + azure_service.ListAzureClustersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_azure_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, azure_resources.AzureCluster) for i in results) + + pages = list(client.list_azure_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - azure_service.UpdateAzureClusterRequest, + azure_service.DeleteAzureClusterRequest, dict, ], ) -def test_update_azure_cluster_rest(request_type): +def test_delete_azure_cluster_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - 
"azure_cluster": { - "name": "projects/sample1/locations/sample2/azureClusters/sample3" - } - } - request_init["azure_cluster"] = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3", - "description": "description_value", - "azure_region": "azure_region_value", - "resource_group_id": "resource_group_id_value", - "azure_client": "azure_client_value", - "networking": { - "virtual_network_id": "virtual_network_id_value", - "pod_address_cidr_blocks": [ - "pod_address_cidr_blocks_value1", - "pod_address_cidr_blocks_value2", - ], - "service_address_cidr_blocks": [ - "service_address_cidr_blocks_value1", - "service_address_cidr_blocks_value2", - ], - "service_load_balancer_subnet_id": "service_load_balancer_subnet_id_value", - }, - "control_plane": { - "version": "version_value", - "subnet_id": "subnet_id_value", - "vm_size": "vm_size_value", - "ssh_config": {"authorized_key": "authorized_key_value"}, - "root_volume": {"size_gib": 844}, - "main_volume": {}, - "database_encryption": {"key_id": "key_id_value"}, - "proxy_config": { - "resource_group_id": "resource_group_id_value", - "secret_id": "secret_id_value", - }, - "config_encryption": { - "key_id": "key_id_value", - "public_key": "public_key_value", - }, - "tags": {}, - "replica_placements": [ - { - "subnet_id": "subnet_id_value", - "azure_availability_zone": "azure_availability_zone_value", - } - ], - "endpoint_subnet_id": "endpoint_subnet_id_value", - }, - "authorization": {"admin_users": [{"username": "username_value"}]}, - "azure_services_authentication": { - "tenant_id": "tenant_id_value", - "application_id": "application_id_value", - }, - "state": 1, - "endpoint": "endpoint_value", - "uid": "uid_value", - "reconciling": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "workload_identity_config": { - "issuer_uri": "issuer_uri_value", - "workload_pool": "workload_pool_value", - "identity_provider": 
"identity_provider_value", - }, - "cluster_ca_certificate": "cluster_ca_certificate_value", - "fleet": {"project": "project_value", "membership": "membership_value"}, - "managed_resources": { - "network_security_group_id": "network_security_group_id_value", - "control_plane_application_security_group_id": "control_plane_application_security_group_id_value", - }, - "logging_config": {"component_config": {"enable_components": [1]}}, - "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = azure_service.UpdateAzureClusterRequest.meta.fields["azure_cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["azure_cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["azure_cluster"][field])): - del request_init["azure_cluster"][field][i][subfield] - else: - del 
request_init["azure_cluster"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -7177,18 +8797,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_azure_cluster(request) + response = client.delete_azure_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_azure_cluster_rest_required_fields( - request_type=azure_service.UpdateAzureClusterRequest, +def test_delete_azure_cluster_rest_required_fields( + request_type=azure_service.DeleteAzureClusterRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7203,24 +8824,29 @@ def test_update_azure_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_azure_cluster._get_unset_required_fields(jsonified_request) + ).delete_azure_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_azure_cluster._get_unset_required_fields(jsonified_request) + ).delete_azure_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "update_mask", + "allow_missing", + "etag", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7241,10 +8867,9 @@ def test_update_azure_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -7254,37 +8879,33 @@ def test_update_azure_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_azure_cluster(request) + response = client.delete_azure_cluster(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_azure_cluster_rest_unset_required_fields(): +def test_delete_azure_cluster_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_azure_cluster._get_unset_required_fields({}) + unset_fields = transport.delete_azure_cluster._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", + "allowMissing", + "etag", "validateOnly", ) ) - & set( - ( - "azureCluster", - "updateMask", - ) - ) + & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_azure_cluster_rest_interceptors(null_interceptor): +def test_delete_azure_cluster_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7299,14 +8920,14 @@ def 
test_update_azure_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_update_azure_cluster" + transports.AzureClustersRestInterceptor, "post_delete_azure_cluster" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_update_azure_cluster" + transports.AzureClustersRestInterceptor, "pre_delete_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.UpdateAzureClusterRequest.pb( - azure_service.UpdateAzureClusterRequest() + pb_message = azure_service.DeleteAzureClusterRequest.pb( + azure_service.DeleteAzureClusterRequest() ) transcode.return_value = { "method": "post", @@ -7322,7 +8943,7 @@ def test_update_azure_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = azure_service.UpdateAzureClusterRequest() + request = azure_service.DeleteAzureClusterRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -7330,7 +8951,7 @@ def test_update_azure_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_azure_cluster( + client.delete_azure_cluster( request, metadata=[ ("key", "val"), @@ -7342,8 +8963,8 @@ def test_update_azure_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_azure_cluster_rest_bad_request( - transport: str = "rest", request_type=azure_service.UpdateAzureClusterRequest +def test_delete_azure_cluster_rest_bad_request( + transport: str = "rest", request_type=azure_service.DeleteAzureClusterRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7351,11 +8972,7 @@ def test_update_azure_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "azure_cluster": { - "name": 
"projects/sample1/locations/sample2/azureClusters/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7367,10 +8984,10 @@ def test_update_azure_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_azure_cluster(request) + client.delete_azure_cluster(request) -def test_update_azure_cluster_rest_flattened(): +def test_delete_azure_cluster_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7383,15 +9000,12 @@ def test_update_azure_cluster_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "azure_cluster": { - "name": "projects/sample1/locations/sample2/azureClusters/sample3" - } + "name": "projects/sample1/locations/sample2/azureClusters/sample3" } # get truthy value for each flattened field mock_args = dict( - azure_cluster=azure_resources.AzureCluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -7402,20 +9016,20 @@ def test_update_azure_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_azure_cluster(**mock_args) + client.delete_azure_cluster(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{azure_cluster.name=projects/*/locations/*/azureClusters/*}" + "%s/v1/{name=projects/*/locations/*/azureClusters/*}" % client.transport._host, args[1], ) -def test_update_azure_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_azure_cluster_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7424,14 +9038,13 @@ def test_update_azure_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_azure_cluster( - azure_service.UpdateAzureClusterRequest(), - azure_cluster=azure_resources.AzureCluster(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_azure_cluster( + azure_service.DeleteAzureClusterRequest(), + name="name_value", ) -def test_update_azure_cluster_rest_error(): +def test_delete_azure_cluster_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7440,70 +9053,61 @@ def test_update_azure_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.GetAzureClusterRequest, + azure_service.GenerateAzureClusterAgentTokenRequest, dict, ], ) -def test_get_azure_cluster_rest(request_type): +def test_generate_azure_cluster_agent_token_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} + request_init = { + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = azure_resources.AzureCluster( - name="name_value", - description="description_value", - azure_region="azure_region_value", - resource_group_id="resource_group_id_value", - azure_client="azure_client_value", - state=azure_resources.AzureCluster.State.PROVISIONING, - endpoint="endpoint_value", - uid="uid_value", - reconciling=True, - etag="etag_value", - cluster_ca_certificate="cluster_ca_certificate_value", + return_value = azure_service.GenerateAzureClusterAgentTokenResponse( + access_token="access_token_value", + expires_in=1078, + token_type="token_type_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_resources.AzureCluster.pb(return_value) + return_value = azure_service.GenerateAzureClusterAgentTokenResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_azure_cluster(request) + response = client.generate_azure_cluster_agent_token(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, azure_resources.AzureCluster) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.azure_region == "azure_region_value" - assert response.resource_group_id == "resource_group_id_value" - assert response.azure_client == "azure_client_value" - assert response.state == azure_resources.AzureCluster.State.PROVISIONING - assert response.endpoint == "endpoint_value" - assert response.uid == "uid_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.cluster_ca_certificate == "cluster_ca_certificate_value" + assert isinstance(response, azure_service.GenerateAzureClusterAgentTokenResponse) + assert response.access_token == "access_token_value" + assert response.expires_in == 1078 + assert response.token_type == "token_type_value" -def test_get_azure_cluster_rest_required_fields( - request_type=azure_service.GetAzureClusterRequest, +def test_generate_azure_cluster_agent_token_rest_required_fields( + request_type=azure_service.GenerateAzureClusterAgentTokenRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["name"] = "" + request_init["azure_cluster"] = "" + request_init["subject_token"] = "" + request_init["subject_token_type"] = "" + request_init["version"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7518,21 +9122,30 @@ def test_get_azure_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_azure_cluster._get_unset_required_fields(jsonified_request) + ).generate_azure_cluster_agent_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["azureCluster"] = "azure_cluster_value" + 
jsonified_request["subjectToken"] = "subject_token_value" + jsonified_request["subjectTokenType"] = "subject_token_type_value" + jsonified_request["version"] = "version_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_azure_cluster._get_unset_required_fields(jsonified_request) + ).generate_azure_cluster_agent_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "azureCluster" in jsonified_request + assert jsonified_request["azureCluster"] == "azure_cluster_value" + assert "subjectToken" in jsonified_request + assert jsonified_request["subjectToken"] == "subject_token_value" + assert "subjectTokenType" in jsonified_request + assert jsonified_request["subjectTokenType"] == "subject_token_type_value" + assert "version" in jsonified_request + assert jsonified_request["version"] == "version_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7541,7 +9154,7 @@ def test_get_azure_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = azure_resources.AzureCluster() + return_value = azure_service.GenerateAzureClusterAgentTokenResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7553,39 +9166,54 @@ def test_get_azure_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_resources.AzureCluster.pb(return_value) + return_value = azure_service.GenerateAzureClusterAgentTokenResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_azure_cluster(request) + response = client.generate_azure_cluster_agent_token(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_azure_cluster_rest_unset_required_fields(): +def test_generate_azure_cluster_agent_token_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_azure_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.generate_azure_cluster_agent_token._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "azureCluster", + "subjectToken", + "subjectTokenType", + "version", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_azure_cluster_rest_interceptors(null_interceptor): +def test_generate_azure_cluster_agent_token_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7598,14 
+9226,16 @@ def test_get_azure_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_get_azure_cluster" + transports.AzureClustersRestInterceptor, + "post_generate_azure_cluster_agent_token", ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_get_azure_cluster" + transports.AzureClustersRestInterceptor, + "pre_generate_azure_cluster_agent_token", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.GetAzureClusterRequest.pb( - azure_service.GetAzureClusterRequest() + pb_message = azure_service.GenerateAzureClusterAgentTokenRequest.pb( + azure_service.GenerateAzureClusterAgentTokenRequest() ) transcode.return_value = { "method": "post", @@ -7617,19 +9247,21 @@ def test_get_azure_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = azure_resources.AzureCluster.to_json( - azure_resources.AzureCluster() + req.return_value._content = ( + azure_service.GenerateAzureClusterAgentTokenResponse.to_json( + azure_service.GenerateAzureClusterAgentTokenResponse() + ) ) - request = azure_service.GetAzureClusterRequest() + request = azure_service.GenerateAzureClusterAgentTokenRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = azure_resources.AzureCluster() + post.return_value = azure_service.GenerateAzureClusterAgentTokenResponse() - client.get_azure_cluster( + client.generate_azure_cluster_agent_token( request, metadata=[ ("key", "val"), @@ -7641,8 +9273,9 @@ def test_get_azure_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_azure_cluster_rest_bad_request( - transport: str = "rest", request_type=azure_service.GetAzureClusterRequest +def 
test_generate_azure_cluster_agent_token_rest_bad_request( + transport: str = "rest", + request_type=azure_service.GenerateAzureClusterAgentTokenRequest, ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7650,7 +9283,9 @@ def test_get_azure_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} + request_init = { + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7662,69 +9297,10 @@ def test_get_azure_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_azure_cluster(request) - - -def test_get_azure_cluster_rest_flattened(): - client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = azure_resources.AzureCluster() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_resources.AzureCluster.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_azure_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/azureClusters/*}" - % client.transport._host, - args[1], - ) - - -def test_get_azure_cluster_rest_flattened_error(transport: str = "rest"): - client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_azure_cluster( - azure_service.GetAzureClusterRequest(), - name="name_value", - ) + client.generate_azure_cluster_agent_token(request) -def test_get_azure_cluster_rest_error(): +def test_generate_azure_cluster_agent_token_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7733,50 +9309,52 @@ def test_get_azure_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.ListAzureClustersRequest, + azure_service.GenerateAzureAccessTokenRequest, dict, ], ) -def test_list_azure_clusters_rest(request_type): +def test_generate_azure_access_token_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = azure_service.ListAzureClustersResponse( - next_page_token="next_page_token_value", + return_value = azure_service.GenerateAzureAccessTokenResponse( + access_token="access_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_service.ListAzureClustersResponse.pb(return_value) + return_value = azure_service.GenerateAzureAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_azure_clusters(request) + response = client.generate_azure_access_token(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAzureClustersPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, azure_service.GenerateAzureAccessTokenResponse) + assert response.access_token == "access_token_value" -def test_list_azure_clusters_rest_required_fields( - request_type=azure_service.ListAzureClustersRequest, +def test_generate_azure_access_token_rest_required_fields( + request_type=azure_service.GenerateAzureAccessTokenRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["parent"] = "" + request_init["azure_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7791,28 +9369,21 @@ def test_list_azure_clusters_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_azure_clusters._get_unset_required_fields(jsonified_request) + ).generate_azure_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 
"parent_value" + jsonified_request["azureCluster"] = "azure_cluster_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_azure_clusters._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).generate_azure_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "azureCluster" in jsonified_request + assert jsonified_request["azureCluster"] == "azure_cluster_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7821,7 +9392,7 @@ def test_list_azure_clusters_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = azure_service.ListAzureClustersResponse() + return_value = azure_service.GenerateAzureAccessTokenResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7842,38 +9413,32 @@ def test_list_azure_clusters_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_service.ListAzureClustersResponse.pb(return_value) + return_value = azure_service.GenerateAzureAccessTokenResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_azure_clusters(request) + response = client.generate_azure_access_token(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_azure_clusters_rest_unset_required_fields(): +def test_generate_azure_access_token_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_azure_clusters._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.generate_azure_access_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("azureCluster",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_azure_clusters_rest_interceptors(null_interceptor): +def test_generate_azure_access_token_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7886,14 +9451,14 @@ def test_list_azure_clusters_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_list_azure_clusters" + transports.AzureClustersRestInterceptor, 
"post_generate_azure_access_token" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_list_azure_clusters" + transports.AzureClustersRestInterceptor, "pre_generate_azure_access_token" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.ListAzureClustersRequest.pb( - azure_service.ListAzureClustersRequest() + pb_message = azure_service.GenerateAzureAccessTokenRequest.pb( + azure_service.GenerateAzureAccessTokenRequest() ) transcode.return_value = { "method": "post", @@ -7905,19 +9470,21 @@ def test_list_azure_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = azure_service.ListAzureClustersResponse.to_json( - azure_service.ListAzureClustersResponse() + req.return_value._content = ( + azure_service.GenerateAzureAccessTokenResponse.to_json( + azure_service.GenerateAzureAccessTokenResponse() + ) ) - request = azure_service.ListAzureClustersRequest() + request = azure_service.GenerateAzureAccessTokenRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = azure_service.ListAzureClustersResponse() + post.return_value = azure_service.GenerateAzureAccessTokenResponse() - client.list_azure_clusters( + client.generate_azure_access_token( request, metadata=[ ("key", "val"), @@ -7929,8 +9496,8 @@ def test_list_azure_clusters_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_azure_clusters_rest_bad_request( - transport: str = "rest", request_type=azure_service.ListAzureClustersRequest +def test_generate_azure_access_token_rest_bad_request( + transport: str = "rest", request_type=azure_service.GenerateAzureAccessTokenRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7938,7 +9505,9 @@ def test_list_azure_clusters_rest_bad_request( ) # send a 
request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7950,144 +9519,133 @@ def test_list_azure_clusters_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_azure_clusters(request) + client.generate_azure_access_token(request) -def test_list_azure_clusters_rest_flattened(): +def test_generate_azure_access_token_rest_error(): client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = azure_service.ListAzureClustersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_service.ListAzureClustersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_azure_clusters(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/azureClusters" - % client.transport._host, - args[1], - ) - -def test_list_azure_clusters_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + azure_service.CreateAzureNodePoolRequest, + dict, + ], +) +def test_create_azure_node_pool_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_azure_clusters( - azure_service.ListAzureClustersRequest(), - parent="parent_value", - ) - - -def test_list_azure_clusters_rest_pager(transport: str = "rest"): - client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + } + request_init["azure_node_pool"] = { + "name": "name_value", + "version": "version_value", + "config": { + "vm_size": "vm_size_value", + "root_volume": {"size_gib": 844}, + "tags": {}, + "image_type": "image_type_value", + "ssh_config": {"authorized_key": "authorized_key_value"}, + "proxy_config": { + "resource_group_id": "resource_group_id_value", + "secret_id": "secret_id_value", + }, + "config_encryption": { + "key_id": "key_id_value", + "public_key": "public_key_value", + }, + "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], + "labels": {}, + }, + "subnet_id": "subnet_id_value", + "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, + "state": 1, + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + 
"max_pods_constraint": {"max_pods_per_node": 1798}, + "azure_availability_zone": "azure_availability_zone_value", + "errors": [{"message": "message_value"}], + "management": {"auto_repair": True}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - azure_service.ListAzureClustersResponse( - azure_clusters=[ - azure_resources.AzureCluster(), - azure_resources.AzureCluster(), - azure_resources.AzureCluster(), - ], - next_page_token="abc", - ), - azure_service.ListAzureClustersResponse( - azure_clusters=[], - next_page_token="def", - ), - azure_service.ListAzureClustersResponse( - azure_clusters=[ - azure_resources.AzureCluster(), - ], - next_page_token="ghi", - ), - azure_service.ListAzureClustersResponse( - azure_clusters=[ - azure_resources.AzureCluster(), - azure_resources.AzureCluster(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Determine if the message type is proto-plus or protobuf + test_field = azure_service.CreateAzureNodePoolRequest.meta.fields["azure_node_pool"] - # Wrap the values into proper Response objs - response = tuple( - azure_service.ListAzureClustersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + def get_message_fields(field): + # Given a field which 
is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - sample_request = {"parent": "projects/sample1/locations/sample2"} + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - pager = client.list_azure_clusters(request=sample_request) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, azure_resources.AzureCluster) for i in results) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - pages = list(client.list_azure_clusters(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["azure_node_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - azure_service.DeleteAzureClusterRequest, - dict, - ], -) -def test_delete_azure_cluster_rest(request_type): - client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + 
if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["azure_node_pool"][field])): + del request_init["azure_node_pool"][field][i][subfield] + else: + del request_init["azure_node_pool"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8102,19 +9660,20 @@ def test_delete_azure_cluster_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_azure_cluster(request) + response = client.create_azure_node_pool(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_azure_cluster_rest_required_fields( - request_type=azure_service.DeleteAzureClusterRequest, +def test_create_azure_node_pool_rest_required_fields( + request_type=azure_service.CreateAzureNodePoolRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["azure_node_pool_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8126,32 +9685,37 @@ def test_delete_azure_cluster_rest_required_fields( ) # verify fields with default values are dropped + assert "azureNodePoolId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_azure_cluster._get_unset_required_fields(jsonified_request) + ).create_azure_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "azureNodePoolId" in jsonified_request + assert jsonified_request["azureNodePoolId"] == request_init["azure_node_pool_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["azureNodePoolId"] = "azure_node_pool_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_azure_cluster._get_unset_required_fields(jsonified_request) + ).create_azure_node_pool._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "etag", + "azure_node_pool_id", "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "azureNodePoolId" in jsonified_request + assert jsonified_request["azureNodePoolId"] == "azure_node_pool_id_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8172,9 +9736,10 @@ def test_delete_azure_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8184,33 +9749,43 @@ def test_delete_azure_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_azure_cluster(request) + response = client.create_azure_node_pool(request) - expected_params = [] + expected_params = [ + ( + "azureNodePoolId", + "", + ), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_azure_cluster_rest_unset_required_fields(): +def test_create_azure_node_pool_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_azure_cluster._get_unset_required_fields({}) + unset_fields = transport.create_azure_node_pool._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "etag", + "azureNodePoolId", "validateOnly", ) ) - & set(("name",)) + & set( + ( + "parent", + "azureNodePool", + "azureNodePoolId", + ) + ) ) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_azure_cluster_rest_interceptors(null_interceptor): +def test_create_azure_node_pool_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8225,14 +9800,14 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_delete_azure_cluster" + transports.AzureClustersRestInterceptor, "post_create_azure_node_pool" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_delete_azure_cluster" + transports.AzureClustersRestInterceptor, "pre_create_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.DeleteAzureClusterRequest.pb( - azure_service.DeleteAzureClusterRequest() + pb_message = azure_service.CreateAzureNodePoolRequest.pb( + azure_service.CreateAzureNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -8248,7 +9823,7 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = azure_service.DeleteAzureClusterRequest() + request = azure_service.CreateAzureNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -8256,7 +9831,7 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_azure_cluster( + client.create_azure_node_pool( request, metadata=[ ("key", "val"), @@ -8268,8 +9843,8 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_azure_cluster_rest_bad_request( - transport: str = "rest", request_type=azure_service.DeleteAzureClusterRequest +def test_create_azure_node_pool_rest_bad_request( + 
transport: str = "rest", request_type=azure_service.CreateAzureNodePoolRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8277,7 +9852,9 @@ def test_delete_azure_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/azureClusters/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8289,10 +9866,10 @@ def test_delete_azure_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_azure_cluster(request) + client.create_azure_node_pool(request) -def test_delete_azure_cluster_rest_flattened(): +def test_create_azure_node_pool_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8305,12 +9882,14 @@ def test_delete_azure_cluster_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3" + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + azure_node_pool=azure_resources.AzureNodePool(name="name_value"), + azure_node_pool_id="azure_node_pool_id_value", ) mock_args.update(sample_request) @@ -8321,20 +9900,20 @@ def test_delete_azure_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_azure_cluster(**mock_args) + client.create_azure_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/azureClusters/*}" + "%s/v1/{parent=projects/*/locations/*/azureClusters/*}/azureNodePools" % client.transport._host, args[1], ) -def test_delete_azure_cluster_rest_flattened_error(transport: str = "rest"): +def test_create_azure_node_pool_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8343,13 +9922,15 @@ def test_delete_azure_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_azure_cluster( - azure_service.DeleteAzureClusterRequest(), - name="name_value", + client.create_azure_node_pool( + azure_service.CreateAzureNodePoolRequest(), + parent="parent_value", + azure_node_pool=azure_resources.AzureNodePool(name="name_value"), + azure_node_pool_id="azure_node_pool_id_value", ) -def test_delete_azure_cluster_rest_error(): +def test_create_azure_node_pool_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8358,11 +9939,11 @@ def test_delete_azure_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.GenerateAzureAccessTokenRequest, + azure_service.UpdateAzureNodePoolRequest, dict, ], ) -def test_generate_azure_access_token_rest(request_type): +def test_update_azure_node_pool_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8370,40 +9951,137 @@ def test_generate_azure_access_token_rest(request_type): # send a request that will satisfy transcoding request_init = { - "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" + "azure_node_pool": { + "name": 
"projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" + } + } + request_init["azure_node_pool"] = { + "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4", + "version": "version_value", + "config": { + "vm_size": "vm_size_value", + "root_volume": {"size_gib": 844}, + "tags": {}, + "image_type": "image_type_value", + "ssh_config": {"authorized_key": "authorized_key_value"}, + "proxy_config": { + "resource_group_id": "resource_group_id_value", + "secret_id": "secret_id_value", + }, + "config_encryption": { + "key_id": "key_id_value", + "public_key": "public_key_value", + }, + "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], + "labels": {}, + }, + "subnet_id": "subnet_id_value", + "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, + "state": 1, + "uid": "uid_value", + "reconciling": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "annotations": {}, + "max_pods_constraint": {"max_pods_per_node": 1798}, + "azure_availability_zone": "azure_availability_zone_value", + "errors": [{"message": "message_value"}], + "management": {"auto_repair": True}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = azure_service.UpdateAzureNodePoolRequest.meta.fields["azure_node_pool"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["azure_node_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["azure_node_pool"][field])): + del request_init["azure_node_pool"][field][i][subfield] + else: 
+ del request_init["azure_node_pool"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = azure_service.GenerateAzureAccessTokenResponse( - access_token="access_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_service.GenerateAzureAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.generate_azure_access_token(request) + response = client.update_azure_node_pool(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, azure_service.GenerateAzureAccessTokenResponse) - assert response.access_token == "access_token_value" + assert response.operation.name == "operations/spam" -def test_generate_azure_access_token_rest_required_fields( - request_type=azure_service.GenerateAzureAccessTokenRequest, +def test_update_azure_node_pool_rest_required_fields( + request_type=azure_service.UpdateAzureNodePoolRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["azure_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8418,21 +10096,24 @@ def test_generate_azure_access_token_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_azure_access_token._get_unset_required_fields(jsonified_request) + ).update_azure_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) - # verify required fields with default values are now present - - jsonified_request["azureCluster"] = "azure_cluster_value" + # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).generate_azure_access_token._get_unset_required_fields(jsonified_request) + ).update_azure_node_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "azureCluster" in jsonified_request - assert jsonified_request["azureCluster"] == "azure_cluster_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8441,7 +10122,7 @@ def test_generate_azure_access_token_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = azure_service.GenerateAzureAccessTokenResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8453,41 +10134,50 @@ def test_generate_azure_access_token_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = azure_service.GenerateAzureAccessTokenResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.generate_azure_access_token(request) + response = client.update_azure_node_pool(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_generate_azure_access_token_rest_unset_required_fields(): +def test_update_azure_node_pool_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.generate_azure_access_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("azureCluster",))) + unset_fields = transport.update_azure_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "azureNodePool", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_azure_access_token_rest_interceptors(null_interceptor): +def test_update_azure_node_pool_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8500,14 +10190,16 @@ def test_generate_azure_access_token_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_generate_azure_access_token" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AzureClustersRestInterceptor, "post_update_azure_node_pool" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_generate_azure_access_token" + transports.AzureClustersRestInterceptor, "pre_update_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.GenerateAzureAccessTokenRequest.pb( - azure_service.GenerateAzureAccessTokenRequest() + pb_message = azure_service.UpdateAzureNodePoolRequest.pb( + azure_service.UpdateAzureNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -8519,21 +10211,19 @@ def test_generate_azure_access_token_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - azure_service.GenerateAzureAccessTokenResponse.to_json( - azure_service.GenerateAzureAccessTokenResponse() - ) + 
req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = azure_service.GenerateAzureAccessTokenRequest() + request = azure_service.UpdateAzureNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = azure_service.GenerateAzureAccessTokenResponse() + post.return_value = operations_pb2.Operation() - client.generate_azure_access_token( + client.update_azure_node_pool( request, metadata=[ ("key", "val"), @@ -8545,8 +10235,8 @@ def test_generate_azure_access_token_rest_interceptors(null_interceptor): post.assert_called_once() -def test_generate_azure_access_token_rest_bad_request( - transport: str = "rest", request_type=azure_service.GenerateAzureAccessTokenRequest +def test_update_azure_node_pool_rest_bad_request( + transport: str = "rest", request_type=azure_service.UpdateAzureNodePoolRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8555,7 +10245,9 @@ def test_generate_azure_access_token_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" + "azure_node_pool": { + "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" + } } request = request_type(**request_init) @@ -8568,10 +10260,71 @@ def test_generate_azure_access_token_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.generate_azure_access_token(request) + client.update_azure_node_pool(request) -def test_generate_azure_access_token_rest_error(): +def test_update_azure_node_pool_rest_flattened(): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "azure_node_pool": { + "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + azure_node_pool=azure_resources.AzureNodePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_azure_node_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{azure_node_pool.name=projects/*/locations/*/azureClusters/*/azureNodePools/*}" + % client.transport._host, + args[1], + ) + + +def test_update_azure_node_pool_rest_flattened_error(transport: str = "rest"): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_azure_node_pool( + azure_service.UpdateAzureNodePoolRequest(), + azure_node_pool=azure_resources.AzureNodePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_azure_node_pool_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8580,11 +10333,11 @@ def test_generate_azure_access_token_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.CreateAzureNodePoolRequest, + azure_service.GetAzureNodePoolRequest, dict, ], ) -def test_create_azure_node_pool_rest(request_type): +def test_get_azure_node_pool_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8592,136 +10345,54 @@ def test_create_azure_node_pool_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" - } - request_init["azure_node_pool"] = { - "name": "name_value", - "version": "version_value", - "config": { - "vm_size": "vm_size_value", - "root_volume": {"size_gib": 844}, - "tags": {}, - "image_type": "image_type_value", - "ssh_config": {"authorized_key": "authorized_key_value"}, - "proxy_config": { - "resource_group_id": "resource_group_id_value", - "secret_id": "secret_id_value", - }, - "config_encryption": { - "key_id": "key_id_value", - "public_key": "public_key_value", - }, - "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], - "labels": {}, - }, - "subnet_id": "subnet_id_value", - "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, - "state": 1, - "uid": "uid_value", - "reconciling": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": "etag_value", - "annotations": {}, - "max_pods_constraint": {"max_pods_per_node": 1798}, - "azure_availability_zone": 
"azure_availability_zone_value", - "errors": [{"message": "message_value"}], + "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = azure_service.CreateAzureNodePoolRequest.meta.fields["azure_node_pool"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["azure_node_pool"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield 
in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["azure_node_pool"][field])): - del request_init["azure_node_pool"][field][i][subfield] - else: - del request_init["azure_node_pool"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureNodePool( + name="name_value", + version="version_value", + subnet_id="subnet_id_value", + state=azure_resources.AzureNodePool.State.PROVISIONING, + uid="uid_value", + reconciling=True, + etag="etag_value", + azure_availability_zone="azure_availability_zone_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_azure_node_pool(request) + response = client.get_azure_node_pool(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, azure_resources.AzureNodePool) + assert response.name == "name_value" + assert response.version == "version_value" + assert response.subnet_id == "subnet_id_value" + assert response.state == azure_resources.AzureNodePool.State.PROVISIONING + assert response.uid == "uid_value" + assert response.reconciling is True + assert response.etag == "etag_value" + assert response.azure_availability_zone == "azure_availability_zone_value" -def test_create_azure_node_pool_rest_required_fields( - request_type=azure_service.CreateAzureNodePoolRequest, +def test_get_azure_node_pool_rest_required_fields( + request_type=azure_service.GetAzureNodePoolRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["parent"] = "" - request_init["azure_node_pool_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8733,37 +10404,24 @@ def test_create_azure_node_pool_rest_required_fields( ) # verify fields with default values are dropped - assert "azureNodePoolId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_azure_node_pool._get_unset_required_fields(jsonified_request) + ).get_azure_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "azureNodePoolId" in jsonified_request - assert jsonified_request["azureNodePoolId"] == request_init["azure_node_pool_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["azureNodePoolId"] = "azure_node_pool_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_azure_node_pool._get_unset_required_fields(jsonified_request) - # Check that path 
parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "azure_node_pool_id", - "validate_only", - ) - ) + ).get_azure_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "azureNodePoolId" in jsonified_request - assert jsonified_request["azureNodePoolId"] == "azure_node_pool_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8772,7 +10430,7 @@ def test_create_azure_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureNodePool() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8784,56 +10442,39 @@ def test_create_azure_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_resources.AzureNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_azure_node_pool(request) + response = client.get_azure_node_pool(request) - expected_params = [ - ( - "azureNodePoolId", - "", - ), - ] + expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_azure_node_pool_rest_unset_required_fields(): +def test_get_azure_node_pool_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_azure_node_pool._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "azureNodePoolId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "azureNodePool", - "azureNodePoolId", - ) - ) - ) + unset_fields = transport.get_azure_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_azure_node_pool_rest_interceptors(null_interceptor): +def test_get_azure_node_pool_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8846,16 +10487,14 @@ def 
test_create_azure_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_create_azure_node_pool" + transports.AzureClustersRestInterceptor, "post_get_azure_node_pool" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_create_azure_node_pool" + transports.AzureClustersRestInterceptor, "pre_get_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.CreateAzureNodePoolRequest.pb( - azure_service.CreateAzureNodePoolRequest() + pb_message = azure_service.GetAzureNodePoolRequest.pb( + azure_service.GetAzureNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -8867,19 +10506,19 @@ def test_create_azure_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = azure_resources.AzureNodePool.to_json( + azure_resources.AzureNodePool() ) - request = azure_service.CreateAzureNodePoolRequest() + request = azure_service.GetAzureNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = azure_resources.AzureNodePool() - client.create_azure_node_pool( + client.get_azure_node_pool( request, metadata=[ ("key", "val"), @@ -8891,8 +10530,8 @@ def test_create_azure_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_azure_node_pool_rest_bad_request( - transport: str = "rest", request_type=azure_service.CreateAzureNodePoolRequest +def test_get_azure_node_pool_rest_bad_request( + transport: str = "rest", 
request_type=azure_service.GetAzureNodePoolRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8901,7 +10540,7 @@ def test_create_azure_node_pool_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" } request = request_type(**request_init) @@ -8914,10 +10553,10 @@ def test_create_azure_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_azure_node_pool(request) + client.get_azure_node_pool(request) -def test_create_azure_node_pool_rest_flattened(): +def test_get_azure_node_pool_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8926,42 +10565,42 @@ def test_create_azure_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureNodePool() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - azure_node_pool=azure_resources.AzureNodePool(name="name_value"), - azure_node_pool_id="azure_node_pool_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_azure_node_pool(**mock_args) + client.get_azure_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/azureClusters/*}/azureNodePools" + "%s/v1/{name=projects/*/locations/*/azureClusters/*/azureNodePools/*}" % client.transport._host, args[1], ) -def test_create_azure_node_pool_rest_flattened_error(transport: str = "rest"): +def test_get_azure_node_pool_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8970,15 +10609,13 @@ def test_create_azure_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_azure_node_pool( - azure_service.CreateAzureNodePoolRequest(), - parent="parent_value", - azure_node_pool=azure_resources.AzureNodePool(name="name_value"), - azure_node_pool_id="azure_node_pool_id_value", + client.get_azure_node_pool( + azure_service.GetAzureNodePoolRequest(), + name="name_value", ) -def test_create_azure_node_pool_rest_error(): +def test_get_azure_node_pool_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8987,148 +10624,52 @@ def test_create_azure_node_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.UpdateAzureNodePoolRequest, + azure_service.ListAzureNodePoolsRequest, dict, - ], -) -def test_update_azure_node_pool_rest(request_type): - client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "azure_node_pool": { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" - } - } - request_init["azure_node_pool"] = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4", - "version": "version_value", - "config": { - "vm_size": "vm_size_value", - "root_volume": {"size_gib": 844}, - "tags": {}, - "image_type": "image_type_value", - "ssh_config": {"authorized_key": "authorized_key_value"}, - "proxy_config": { - "resource_group_id": "resource_group_id_value", - "secret_id": "secret_id_value", - }, - "config_encryption": { - "key_id": "key_id_value", - "public_key": "public_key_value", - }, - "taints": [{"key": "key_value", "value": "value_value", "effect": 1}], - "labels": {}, - }, - "subnet_id": "subnet_id_value", - "autoscaling": {"min_node_count": 1489, "max_node_count": 1491}, - "state": 1, - "uid": "uid_value", - "reconciling": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "etag": 
"etag_value", - "annotations": {}, - "max_pods_constraint": {"max_pods_per_node": 1798}, - "azure_availability_zone": "azure_availability_zone_value", - "errors": [{"message": "message_value"}], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = azure_service.UpdateAzureNodePoolRequest.meta.fields["azure_node_pool"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["azure_node_pool"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and 
hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["azure_node_pool"][field])): - del request_init["azure_node_pool"][field][i][subfield] - else: - del request_init["azure_node_pool"][field][subfield] + ], +) +def test_list_azure_node_pools_rest(request_type): + client = AzureClustersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_service.ListAzureNodePoolsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_service.ListAzureNodePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_azure_node_pool(request) + response = client.list_azure_node_pools(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListAzureNodePoolsPager) + assert response.next_page_token == "next_page_token_value" -def test_update_azure_node_pool_rest_required_fields( - request_type=azure_service.UpdateAzureNodePoolRequest, +def test_list_azure_node_pools_rest_required_fields( + request_type=azure_service.ListAzureNodePoolsRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9143,24 +10684,28 @@ def test_update_azure_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_azure_node_pool._get_unset_required_fields(jsonified_request) + ).list_azure_node_pools._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_azure_node_pool._get_unset_required_fields(jsonified_request) + 
).list_azure_node_pools._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9169,7 +10714,7 @@ def test_update_azure_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_service.ListAzureNodePoolsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9181,50 +10726,47 @@ def test_update_azure_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_service.ListAzureNodePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_azure_node_pool(request) + response = client.list_azure_node_pools(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_azure_node_pool_rest_unset_required_fields(): +def test_list_azure_node_pools_rest_unset_required_fields(): 
transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_azure_node_pool._get_unset_required_fields({}) + unset_fields = transport.list_azure_node_pools._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "azureNodePool", - "updateMask", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_azure_node_pool_rest_interceptors(null_interceptor): +def test_list_azure_node_pools_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9237,16 +10779,14 @@ def test_update_azure_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_update_azure_node_pool" + transports.AzureClustersRestInterceptor, "post_list_azure_node_pools" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_update_azure_node_pool" + transports.AzureClustersRestInterceptor, "pre_list_azure_node_pools" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.UpdateAzureNodePoolRequest.pb( - azure_service.UpdateAzureNodePoolRequest() + pb_message = azure_service.ListAzureNodePoolsRequest.pb( + azure_service.ListAzureNodePoolsRequest() ) transcode.return_value = { "method": "post", @@ -9258,19 +10798,19 @@ def test_update_azure_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = 
azure_service.ListAzureNodePoolsResponse.to_json( + azure_service.ListAzureNodePoolsResponse() ) - request = azure_service.UpdateAzureNodePoolRequest() + request = azure_service.ListAzureNodePoolsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = azure_service.ListAzureNodePoolsResponse() - client.update_azure_node_pool( + client.list_azure_node_pools( request, metadata=[ ("key", "val"), @@ -9282,8 +10822,8 @@ def test_update_azure_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_azure_node_pool_rest_bad_request( - transport: str = "rest", request_type=azure_service.UpdateAzureNodePoolRequest +def test_list_azure_node_pools_rest_bad_request( + transport: str = "rest", request_type=azure_service.ListAzureNodePoolsRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9292,9 +10832,7 @@ def test_update_azure_node_pool_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "azure_node_pool": { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" - } + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" } request = request_type(**request_init) @@ -9307,10 +10845,10 @@ def test_update_azure_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_azure_node_pool(request) + client.list_azure_node_pools(request) -def test_update_azure_node_pool_rest_flattened(): +def test_list_azure_node_pools_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9319,43 +10857,42 @@ def test_update_azure_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_service.ListAzureNodePoolsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "azure_node_pool": { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" - } + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" } # get truthy value for each flattened field mock_args = dict( - azure_node_pool=azure_resources.AzureNodePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_service.ListAzureNodePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_azure_node_pool(**mock_args) + client.list_azure_node_pools(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{azure_node_pool.name=projects/*/locations/*/azureClusters/*/azureNodePools/*}" + "%s/v1/{parent=projects/*/locations/*/azureClusters/*}/azureNodePools" % client.transport._host, args[1], ) -def test_update_azure_node_pool_rest_flattened_error(transport: str = "rest"): +def test_list_azure_node_pools_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9364,27 +10901,85 @@ def test_update_azure_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_azure_node_pool( - azure_service.UpdateAzureNodePoolRequest(), - azure_node_pool=azure_resources.AzureNodePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_azure_node_pools( + azure_service.ListAzureNodePoolsRequest(), + parent="parent_value", ) -def test_update_azure_node_pool_rest_error(): +def test_list_azure_node_pools_rest_pager(transport: str = "rest"): client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + azure_service.ListAzureNodePoolsResponse( + azure_node_pools=[ + azure_resources.AzureNodePool(), + azure_resources.AzureNodePool(), + azure_resources.AzureNodePool(), + ], + next_page_token="abc", + ), + azure_service.ListAzureNodePoolsResponse( + azure_node_pools=[], + next_page_token="def", + ), + azure_service.ListAzureNodePoolsResponse( + azure_node_pools=[ + azure_resources.AzureNodePool(), + ], + next_page_token="ghi", + ), + azure_service.ListAzureNodePoolsResponse( + azure_node_pools=[ + azure_resources.AzureNodePool(), + azure_resources.AzureNodePool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + azure_service.ListAzureNodePoolsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + } + + pager = client.list_azure_node_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, azure_resources.AzureNodePool) for i in results) + + pages = list(client.list_azure_node_pools(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - azure_service.GetAzureNodePoolRequest, + azure_service.DeleteAzureNodePoolRequest, dict, ], ) -def test_get_azure_node_pool_rest(request_type): +def test_delete_azure_node_pool_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9399,42 
+10994,23 @@ def test_get_azure_node_pool_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = azure_resources.AzureNodePool( - name="name_value", - version="version_value", - subnet_id="subnet_id_value", - state=azure_resources.AzureNodePool.State.PROVISIONING, - uid="uid_value", - reconciling=True, - etag="etag_value", - azure_availability_zone="azure_availability_zone_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_resources.AzureNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_azure_node_pool(request) + response = client.delete_azure_node_pool(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, azure_resources.AzureNodePool) - assert response.name == "name_value" - assert response.version == "version_value" - assert response.subnet_id == "subnet_id_value" - assert response.state == azure_resources.AzureNodePool.State.PROVISIONING - assert response.uid == "uid_value" - assert response.reconciling is True - assert response.etag == "etag_value" - assert response.azure_availability_zone == "azure_availability_zone_value" + assert response.operation.name == "operations/spam" -def test_get_azure_node_pool_rest_required_fields( - request_type=azure_service.GetAzureNodePoolRequest, +def test_delete_azure_node_pool_rest_required_fields( + request_type=azure_service.DeleteAzureNodePoolRequest, ): transport_class = transports.AzureClustersRestTransport @@ -9454,7 +11030,7 @@ def test_get_azure_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_azure_node_pool._get_unset_required_fields(jsonified_request) + ).delete_azure_node_pool._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9463,7 +11039,15 @@ def test_get_azure_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_azure_node_pool._get_unset_required_fields(jsonified_request) + ).delete_azure_node_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9477,7 +11061,7 @@ def test_get_azure_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = azure_resources.AzureNodePool() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9489,39 +11073,45 @@ def test_get_azure_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = azure_resources.AzureNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_azure_node_pool(request) + response = client.delete_azure_node_pool(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_azure_node_pool_rest_unset_required_fields(): +def test_delete_azure_node_pool_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_azure_node_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_azure_node_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "validateOnly", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_azure_node_pool_rest_interceptors(null_interceptor): +def test_delete_azure_node_pool_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9534,14 
+11124,16 @@ def test_get_azure_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_get_azure_node_pool" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AzureClustersRestInterceptor, "post_delete_azure_node_pool" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_get_azure_node_pool" + transports.AzureClustersRestInterceptor, "pre_delete_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.GetAzureNodePoolRequest.pb( - azure_service.GetAzureNodePoolRequest() + pb_message = azure_service.DeleteAzureNodePoolRequest.pb( + azure_service.DeleteAzureNodePoolRequest() ) transcode.return_value = { "method": "post", @@ -9553,19 +11145,19 @@ def test_get_azure_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = azure_resources.AzureNodePool.to_json( - azure_resources.AzureNodePool() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = azure_service.GetAzureNodePoolRequest() + request = azure_service.DeleteAzureNodePoolRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = azure_resources.AzureNodePool() + post.return_value = operations_pb2.Operation() - client.get_azure_node_pool( + client.delete_azure_node_pool( request, metadata=[ ("key", "val"), @@ -9577,8 +11169,8 @@ def test_get_azure_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_azure_node_pool_rest_bad_request( - transport: str = "rest", request_type=azure_service.GetAzureNodePoolRequest +def test_delete_azure_node_pool_rest_bad_request( + transport: str = "rest", 
request_type=azure_service.DeleteAzureNodePoolRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9600,10 +11192,10 @@ def test_get_azure_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_azure_node_pool(request) + client.delete_azure_node_pool(request) -def test_get_azure_node_pool_rest_flattened(): +def test_delete_azure_node_pool_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9612,7 +11204,7 @@ def test_get_azure_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = azure_resources.AzureNodePool() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -9628,13 +11220,11 @@ def test_get_azure_node_pool_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = azure_resources.AzureNodePool.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_azure_node_pool(**mock_args) + client.delete_azure_node_pool(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -9647,7 +11237,7 @@ def test_get_azure_node_pool_rest_flattened(): ) -def test_get_azure_node_pool_rest_flattened_error(transport: str = "rest"): +def test_delete_azure_node_pool_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9656,13 +11246,13 @@ def test_get_azure_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_azure_node_pool( - azure_service.GetAzureNodePoolRequest(), + client.delete_azure_node_pool( + azure_service.DeleteAzureNodePoolRequest(), name="name_value", ) -def test_get_azure_node_pool_rest_error(): +def test_delete_azure_node_pool_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9671,11 +11261,11 @@ def test_get_azure_node_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - azure_service.ListAzureNodePoolsRequest, + azure_service.GetAzureOpenIdConfigRequest, dict, ], ) -def test_list_azure_node_pools_rest(request_type): +def test_get_azure_open_id_config_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9683,40 +11273,56 @@ def test_list_azure_node_pools_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = azure_service.ListAzureNodePoolsResponse( - next_page_token="next_page_token_value", + return_value = azure_resources.AzureOpenIdConfig( + issuer="issuer_value", + jwks_uri="jwks_uri_value", + response_types_supported=["response_types_supported_value"], + subject_types_supported=["subject_types_supported_value"], + id_token_signing_alg_values_supported=[ + "id_token_signing_alg_values_supported_value" + ], + claims_supported=["claims_supported_value"], + grant_types=["grant_types_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_service.ListAzureNodePoolsResponse.pb(return_value) + return_value = azure_resources.AzureOpenIdConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_azure_node_pools(request) + response = client.get_azure_open_id_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAzureNodePoolsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, azure_resources.AzureOpenIdConfig) + assert response.issuer == "issuer_value" + assert response.jwks_uri == "jwks_uri_value" + assert response.response_types_supported == ["response_types_supported_value"] + assert response.subject_types_supported == ["subject_types_supported_value"] + assert response.id_token_signing_alg_values_supported == [ + "id_token_signing_alg_values_supported_value" + ] + assert response.claims_supported == ["claims_supported_value"] + assert response.grant_types == ["grant_types_value"] -def test_list_azure_node_pools_rest_required_fields( - request_type=azure_service.ListAzureNodePoolsRequest, +def test_get_azure_open_id_config_rest_required_fields( + request_type=azure_service.GetAzureOpenIdConfigRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["parent"] = "" + request_init["azure_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9731,28 +11337,21 @@ def test_list_azure_node_pools_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_azure_node_pools._get_unset_required_fields(jsonified_request) + ).get_azure_open_id_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["azureCluster"] = "azure_cluster_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_azure_node_pools._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_azure_open_id_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "azureCluster" in jsonified_request + assert jsonified_request["azureCluster"] == "azure_cluster_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9761,7 +11360,7 @@ def test_list_azure_node_pools_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = azure_service.ListAzureNodePoolsResponse() + return_value = azure_resources.AzureOpenIdConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9782,38 +11381,30 @@ def test_list_azure_node_pools_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_service.ListAzureNodePoolsResponse.pb(return_value) + return_value = azure_resources.AzureOpenIdConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_azure_node_pools(request) + response = client.get_azure_open_id_config(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_azure_node_pools_rest_unset_required_fields(): +def test_get_azure_open_id_config_rest_unset_required_fields(): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_azure_node_pools._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_azure_open_id_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("azureCluster",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_azure_node_pools_rest_interceptors(null_interceptor): +def test_get_azure_open_id_config_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9826,14 +11417,14 @@ def test_list_azure_node_pools_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AzureClustersRestInterceptor, "post_list_azure_node_pools" + transports.AzureClustersRestInterceptor, "post_get_azure_open_id_config" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_list_azure_node_pools" + transports.AzureClustersRestInterceptor, "pre_get_azure_open_id_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.ListAzureNodePoolsRequest.pb( - azure_service.ListAzureNodePoolsRequest() + pb_message = azure_service.GetAzureOpenIdConfigRequest.pb( + azure_service.GetAzureOpenIdConfigRequest() ) transcode.return_value = { "method": "post", @@ -9845,19 +11436,19 @@ def test_list_azure_node_pools_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = azure_service.ListAzureNodePoolsResponse.to_json( - azure_service.ListAzureNodePoolsResponse() + req.return_value._content = azure_resources.AzureOpenIdConfig.to_json( + azure_resources.AzureOpenIdConfig() ) - request = azure_service.ListAzureNodePoolsRequest() + request = azure_service.GetAzureOpenIdConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = azure_service.ListAzureNodePoolsResponse() + post.return_value = azure_resources.AzureOpenIdConfig() - client.list_azure_node_pools( + client.get_azure_open_id_config( request, metadata=[ ("key", "val"), @@ -9869,8 +11460,8 @@ def test_list_azure_node_pools_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_azure_node_pools_rest_bad_request( - transport: str = "rest", request_type=azure_service.ListAzureNodePoolsRequest +def test_get_azure_open_id_config_rest_bad_request( + transport: str = "rest", request_type=azure_service.GetAzureOpenIdConfigRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9879,7 +11470,7 @@ def test_list_azure_node_pools_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" } request = request_type(**request_init) @@ -9892,10 +11483,10 @@ def test_list_azure_node_pools_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_azure_node_pools(request) + client.get_azure_open_id_config(request) -def test_list_azure_node_pools_rest_flattened(): +def test_get_azure_open_id_config_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9904,16 +11495,16 @@ def test_list_azure_node_pools_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = azure_service.ListAzureNodePoolsResponse() + return_value = azure_resources.AzureOpenIdConfig() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + azure_cluster="azure_cluster_value", ) mock_args.update(sample_request) @@ -9921,25 +11512,25 @@ def test_list_azure_node_pools_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = azure_service.ListAzureNodePoolsResponse.pb(return_value) + return_value = azure_resources.AzureOpenIdConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_azure_node_pools(**mock_args) + client.get_azure_open_id_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/azureClusters/*}/azureNodePools" + "%s/v1/{azure_cluster=projects/*/locations/*/azureClusters/*}/.well-known/openid-configuration" % client.transport._host, args[1], ) -def test_list_azure_node_pools_rest_flattened_error(transport: str = "rest"): +def test_get_azure_open_id_config_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9948,85 +11539,26 @@ def test_list_azure_node_pools_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_azure_node_pools( - azure_service.ListAzureNodePoolsRequest(), - parent="parent_value", + client.get_azure_open_id_config( + azure_service.GetAzureOpenIdConfigRequest(), + azure_cluster="azure_cluster_value", ) -def test_list_azure_node_pools_rest_pager(transport: str = "rest"): +def test_get_azure_open_id_config_rest_error(): client = AzureClustersClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - azure_service.ListAzureNodePoolsResponse( - azure_node_pools=[ - azure_resources.AzureNodePool(), - azure_resources.AzureNodePool(), - azure_resources.AzureNodePool(), - ], - next_page_token="abc", - ), - azure_service.ListAzureNodePoolsResponse( - azure_node_pools=[], - next_page_token="def", - ), - azure_service.ListAzureNodePoolsResponse( - azure_node_pools=[ - azure_resources.AzureNodePool(), - ], - next_page_token="ghi", - ), - azure_service.ListAzureNodePoolsResponse( - azure_node_pools=[ - azure_resources.AzureNodePool(), - azure_resources.AzureNodePool(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - azure_service.ListAzureNodePoolsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/azureClusters/sample3" 
- } - - pager = client.list_azure_node_pools(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, azure_resources.AzureNodePool) for i in results) - - pages = list(client.list_azure_node_pools(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - azure_service.DeleteAzureNodePoolRequest, + azure_service.GetAzureJsonWebKeysRequest, dict, ], ) -def test_delete_azure_node_pool_rest(request_type): +def test_get_azure_json_web_keys_rest(request_type): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10034,35 +11566,37 @@ def test_delete_azure_node_pool_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureJsonWebKeys() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureJsonWebKeys.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_azure_node_pool(request) + response = client.get_azure_json_web_keys(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, azure_resources.AzureJsonWebKeys) -def test_delete_azure_node_pool_rest_required_fields( - request_type=azure_service.DeleteAzureNodePoolRequest, +def test_get_azure_json_web_keys_rest_required_fields( + request_type=azure_service.GetAzureJsonWebKeysRequest, ): transport_class = transports.AzureClustersRestTransport request_init = {} - request_init["name"] = "" + request_init["azure_cluster"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10077,29 +11611,21 @@ def test_delete_azure_node_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_azure_node_pool._get_unset_required_fields(jsonified_request) + ).get_azure_json_web_keys._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["azureCluster"] = "azure_cluster_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_azure_node_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "validate_only", - ) - ) + ).get_azure_json_web_keys._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "azureCluster" in jsonified_request + assert jsonified_request["azureCluster"] == "azure_cluster_value" client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10108,7 +11634,7 @@ def test_delete_azure_node_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureJsonWebKeys() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10120,45 +11646,39 @@ def test_delete_azure_node_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = azure_resources.AzureJsonWebKeys.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_azure_node_pool(request) + response = client.get_azure_json_web_keys(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_azure_node_pool_rest_unset_required_fields(): +def test_get_azure_json_web_keys_rest_unset_required_fields(): transport = 
transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_azure_node_pool._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "etag", - "validateOnly", - ) - ) - & set(("name",)) - ) + unset_fields = transport.get_azure_json_web_keys._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("azureCluster",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_azure_node_pool_rest_interceptors(null_interceptor): +def test_get_azure_json_web_keys_rest_interceptors(null_interceptor): transport = transports.AzureClustersRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10171,16 +11691,14 @@ def test_delete_azure_node_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AzureClustersRestInterceptor, "post_delete_azure_node_pool" + transports.AzureClustersRestInterceptor, "post_get_azure_json_web_keys" ) as post, mock.patch.object( - transports.AzureClustersRestInterceptor, "pre_delete_azure_node_pool" + transports.AzureClustersRestInterceptor, "pre_get_azure_json_web_keys" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = azure_service.DeleteAzureNodePoolRequest.pb( - azure_service.DeleteAzureNodePoolRequest() + pb_message = azure_service.GetAzureJsonWebKeysRequest.pb( + azure_service.GetAzureJsonWebKeysRequest() ) transcode.return_value = { "method": "post", @@ -10192,19 +11710,19 @@ def test_delete_azure_node_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = 
azure_resources.AzureJsonWebKeys.to_json( + azure_resources.AzureJsonWebKeys() ) - request = azure_service.DeleteAzureNodePoolRequest() + request = azure_service.GetAzureJsonWebKeysRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = azure_resources.AzureJsonWebKeys() - client.delete_azure_node_pool( + client.get_azure_json_web_keys( request, metadata=[ ("key", "val"), @@ -10216,8 +11734,8 @@ def test_delete_azure_node_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_azure_node_pool_rest_bad_request( - transport: str = "rest", request_type=azure_service.DeleteAzureNodePoolRequest +def test_get_azure_json_web_keys_rest_bad_request( + transport: str = "rest", request_type=azure_service.GetAzureJsonWebKeysRequest ): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10226,7 +11744,7 @@ def test_delete_azure_node_pool_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" } request = request_type(**request_init) @@ -10239,10 +11757,10 @@ def test_delete_azure_node_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_azure_node_pool(request) + client.get_azure_json_web_keys(request) -def test_delete_azure_node_pool_rest_flattened(): +def test_get_azure_json_web_keys_rest_flattened(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10251,40 +11769,42 @@ def test_delete_azure_node_pool_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = azure_resources.AzureJsonWebKeys() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/azureClusters/sample3/azureNodePools/sample4" + "azure_cluster": "projects/sample1/locations/sample2/azureClusters/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + azure_cluster="azure_cluster_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = azure_resources.AzureJsonWebKeys.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_azure_node_pool(**mock_args) + client.get_azure_json_web_keys(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/azureClusters/*/azureNodePools/*}" + "%s/v1/{azure_cluster=projects/*/locations/*/azureClusters/*}/jwks" % client.transport._host, args[1], ) -def test_delete_azure_node_pool_rest_flattened_error(transport: str = "rest"): +def test_get_azure_json_web_keys_rest_flattened_error(transport: str = "rest"): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10293,13 +11813,13 @@ def test_delete_azure_node_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_azure_node_pool( - azure_service.DeleteAzureNodePoolRequest(), - name="name_value", + client.get_azure_json_web_keys( + azure_service.GetAzureJsonWebKeysRequest(), + azure_cluster="azure_cluster_value", ) -def test_delete_azure_node_pool_rest_error(): +def test_get_azure_json_web_keys_rest_error(): client = AzureClustersClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10728,12 +12248,15 @@ def test_azure_clusters_base_transport(): "get_azure_cluster", "list_azure_clusters", "delete_azure_cluster", + "generate_azure_cluster_agent_token", "generate_azure_access_token", "create_azure_node_pool", "update_azure_node_pool", "get_azure_node_pool", "list_azure_node_pools", "delete_azure_node_pool", + "get_azure_open_id_config", + "get_azure_json_web_keys", "get_azure_server_config", "get_operation", "cancel_operation", @@ -11046,6 +12569,9 @@ def test_azure_clusters_client_transport_session_collision(transport_name): session1 = client1.transport.delete_azure_cluster._session session2 = client2.transport.delete_azure_cluster._session assert session1 != session2 + session1 = client1.transport.generate_azure_cluster_agent_token._session + session2 = client2.transport.generate_azure_cluster_agent_token._session + assert session1 != session2 session1 = client1.transport.generate_azure_access_token._session session2 = client2.transport.generate_azure_access_token._session assert session1 != session2 @@ -11064,6 +12590,12 @@ def test_azure_clusters_client_transport_session_collision(transport_name): session1 = client1.transport.delete_azure_node_pool._session session2 = client2.transport.delete_azure_node_pool._session assert session1 != session2 + session1 = client1.transport.get_azure_open_id_config._session + session2 = client2.transport.get_azure_open_id_config._session + assert session1 != session2 + session1 = client1.transport.get_azure_json_web_keys._session + session2 = 
client2.transport.get_azure_json_web_keys._session + assert session1 != session2 session1 = client1.transport.get_azure_server_config._session session2 = client2.transport.get_azure_server_config._session assert session1 != session2 diff --git a/packages/google-cloud-monitoring/CHANGELOG.md b/packages/google-cloud-monitoring/CHANGELOG.md index c469f01e7acc..5e7fd2029aa4 100644 --- a/packages/google-cloud-monitoring/CHANGELOG.md +++ b/packages/google-cloud-monitoring/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-monitoring/#history +## [2.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-monitoring-v2.17.0...google-cloud-monitoring-v2.18.0) (2023-12-12) + + +### Features + +* Added support for severity in AlertPolicy ([4e817f8](https://github.com/googleapis/google-cloud-python/commit/4e817f8dac1e884e5eab4f81a43d129635d83369)) + + +### Documentation + +* add value range to comment on field forecast_horizon ([4e817f8](https://github.com/googleapis/google-cloud-python/commit/4e817f8dac1e884e5eab4f81a43d129635d83369)) + ## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-monitoring-v2.16.0...google-cloud-monitoring-v2.17.0) (2023-12-07) diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py b/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py index 023a4c389be5..e718b6d93d2a 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py index 023a4c389be5..e718b6d93d2a 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py index 2d1963ce19c2..cbb109bacf1b 100644 --- a/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py +++ b/packages/google-cloud-monitoring/google/cloud/monitoring_v3/types/alert.py @@ -148,6 +148,12 @@ class AlertPolicy(proto.Message): alert_strategy (google.cloud.monitoring_v3.types.AlertPolicy.AlertStrategy): Control over how this alert policy's notification channels are notified. + severity (google.cloud.monitoring_v3.types.AlertPolicy.Severity): + Optional. The severity of an alert policy + indicates how important incidents generated by + that policy are. The severity level will be + displayed on the Incident detail page and in + notifications. """ class ConditionCombinerType(proto.Enum): @@ -176,6 +182,33 @@ class ConditionCombinerType(proto.Enum): OR = 2 AND_WITH_MATCHING_RESOURCE = 3 + class Severity(proto.Enum): + r"""An enumeration of possible severity level for an Alert + Policy. + + Values: + SEVERITY_UNSPECIFIED (0): + No severity is specified. This is the default + value. + CRITICAL (1): + This is the highest severity level. 
Use this + if the problem could cause significant damage or + downtime. + ERROR (2): + This is the medium severity level. Use this + if the problem could cause minor damage or + downtime. + WARNING (3): + This is the lowest severity level. Use this + if the problem is not causing any damage or + downtime, but could potentially lead to a + problem in the future. + """ + SEVERITY_UNSPECIFIED = 0 + CRITICAL = 1 + ERROR = 2 + WARNING = 3 + class Documentation(proto.Message): r"""A content string and a MIME type that describes the content string's format. @@ -474,7 +507,8 @@ class ForecastOptions(proto.Message): predicted value is found to violate the threshold, and the violation is observed in all forecasts made for the configured ``duration``, then the time series is considered - to be failing. + to be failing. The forecast horizon can range from 1 hour to + 60 hours. """ forecast_horizon: duration_pb2.Duration = proto.Field( @@ -1008,6 +1042,11 @@ class NotificationChannelStrategy(proto.Message): number=21, message=AlertStrategy, ) + severity: Severity = proto.Field( + proto.ENUM, + number=22, + enum=Severity, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json b/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json index 117a05e4d4dc..b8c50e67737f 100644 --- a/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json +++ b/packages/google-cloud-monitoring/samples/generated_samples/snippet_metadata_google.monitoring.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-monitoring", - "version": "2.17.0" + "version": "2.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py index 
83c478a22ec4..27c342072aa2 100644 --- a/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py +++ b/packages/google-cloud-monitoring/tests/unit/gapic/monitoring_v3/test_alert_policy_service.py @@ -1183,6 +1183,7 @@ def test_get_alert_policy(request_type, transport: str = "grpc"): display_name="display_name_value", combiner=alert.AlertPolicy.ConditionCombinerType.AND, notification_channels=["notification_channels_value"], + severity=alert.AlertPolicy.Severity.CRITICAL, ) response = client.get_alert_policy(request) @@ -1197,6 +1198,7 @@ def test_get_alert_policy(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND assert response.notification_channels == ["notification_channels_value"] + assert response.severity == alert.AlertPolicy.Severity.CRITICAL def test_get_alert_policy_empty_call(): @@ -1237,6 +1239,7 @@ async def test_get_alert_policy_async( display_name="display_name_value", combiner=alert.AlertPolicy.ConditionCombinerType.AND, notification_channels=["notification_channels_value"], + severity=alert.AlertPolicy.Severity.CRITICAL, ) ) response = await client.get_alert_policy(request) @@ -1252,6 +1255,7 @@ async def test_get_alert_policy_async( assert response.display_name == "display_name_value" assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND assert response.notification_channels == ["notification_channels_value"] + assert response.severity == alert.AlertPolicy.Severity.CRITICAL @pytest.mark.asyncio @@ -1425,6 +1429,7 @@ def test_create_alert_policy(request_type, transport: str = "grpc"): display_name="display_name_value", combiner=alert.AlertPolicy.ConditionCombinerType.AND, notification_channels=["notification_channels_value"], + severity=alert.AlertPolicy.Severity.CRITICAL, ) response = client.create_alert_policy(request) @@ -1439,6 +1444,7 @@ def test_create_alert_policy(request_type, 
transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND assert response.notification_channels == ["notification_channels_value"] + assert response.severity == alert.AlertPolicy.Severity.CRITICAL def test_create_alert_policy_empty_call(): @@ -1483,6 +1489,7 @@ async def test_create_alert_policy_async( display_name="display_name_value", combiner=alert.AlertPolicy.ConditionCombinerType.AND, notification_channels=["notification_channels_value"], + severity=alert.AlertPolicy.Severity.CRITICAL, ) ) response = await client.create_alert_policy(request) @@ -1498,6 +1505,7 @@ async def test_create_alert_policy_async( assert response.display_name == "display_name_value" assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND assert response.notification_channels == ["notification_channels_value"] + assert response.severity == alert.AlertPolicy.Severity.CRITICAL @pytest.mark.asyncio @@ -1923,6 +1931,7 @@ def test_update_alert_policy(request_type, transport: str = "grpc"): display_name="display_name_value", combiner=alert.AlertPolicy.ConditionCombinerType.AND, notification_channels=["notification_channels_value"], + severity=alert.AlertPolicy.Severity.CRITICAL, ) response = client.update_alert_policy(request) @@ -1937,6 +1946,7 @@ def test_update_alert_policy(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND assert response.notification_channels == ["notification_channels_value"] + assert response.severity == alert.AlertPolicy.Severity.CRITICAL def test_update_alert_policy_empty_call(): @@ -1981,6 +1991,7 @@ async def test_update_alert_policy_async( display_name="display_name_value", combiner=alert.AlertPolicy.ConditionCombinerType.AND, notification_channels=["notification_channels_value"], + severity=alert.AlertPolicy.Severity.CRITICAL, ) ) response = 
await client.update_alert_policy(request) @@ -1996,6 +2007,7 @@ async def test_update_alert_policy_async( assert response.display_name == "display_name_value" assert response.combiner == alert.AlertPolicy.ConditionCombinerType.AND assert response.notification_channels == ["notification_channels_value"] + assert response.severity == alert.AlertPolicy.Severity.CRITICAL @pytest.mark.asyncio diff --git a/packages/google-cloud-netapp/CHANGELOG.md b/packages/google-cloud-netapp/CHANGELOG.md index 7d145aba82a2..6625338d58b8 100644 --- a/packages/google-cloud-netapp/CHANGELOG.md +++ b/packages/google-cloud-netapp/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.3.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.2...google-cloud-netapp-v0.3.3) (2024-01-04) + + +### Features + +* Add singular and plural annotations ([b21ac63](https://github.com/googleapis/google-cloud-python/commit/b21ac63d41113dfd9880b4e4ab1fe10928c7b72b)) +* Enable Backup, Backup Vault, and Backup Policy ([b21ac63](https://github.com/googleapis/google-cloud-python/commit/b21ac63d41113dfd9880b4e4ab1fe10928c7b72b)) +* Set field_behavior to IDENTIFIER on the "name" fields ([b21ac63](https://github.com/googleapis/google-cloud-python/commit/b21ac63d41113dfd9880b4e4ab1fe10928c7b72b)) + + +### Documentation + +* Comments are updated for several fields/enums ([b21ac63](https://github.com/googleapis/google-cloud-python/commit/b21ac63d41113dfd9880b4e4ab1fe10928c7b72b)) + ## [0.3.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-netapp-v0.3.1...google-cloud-netapp-v0.3.2) (2023-12-07) diff --git a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py index 97b1d971fea9..224bc8c95ea1 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py @@ -29,6 +29,33 @@ ListActiveDirectoriesResponse, 
UpdateActiveDirectoryRequest, ) +from google.cloud.netapp_v1.types.backup import ( + Backup, + CreateBackupRequest, + DeleteBackupRequest, + GetBackupRequest, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from google.cloud.netapp_v1.types.backup_policy import ( + BackupPolicy, + CreateBackupPolicyRequest, + DeleteBackupPolicyRequest, + GetBackupPolicyRequest, + ListBackupPoliciesRequest, + ListBackupPoliciesResponse, + UpdateBackupPolicyRequest, +) +from google.cloud.netapp_v1.types.backup_vault import ( + BackupVault, + CreateBackupVaultRequest, + DeleteBackupVaultRequest, + GetBackupVaultRequest, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + UpdateBackupVaultRequest, +) from google.cloud.netapp_v1.types.cloud_netapp_service import OperationMetadata from google.cloud.netapp_v1.types.common import EncryptionType, ServiceLevel from google.cloud.netapp_v1.types.kms import ( @@ -77,6 +104,7 @@ ) from google.cloud.netapp_v1.types.volume import ( AccessType, + BackupConfig, CreateVolumeRequest, DailySchedule, DeleteVolumeRequest, @@ -110,6 +138,27 @@ "ListActiveDirectoriesRequest", "ListActiveDirectoriesResponse", "UpdateActiveDirectoryRequest", + "Backup", + "CreateBackupRequest", + "DeleteBackupRequest", + "GetBackupRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "UpdateBackupRequest", + "BackupPolicy", + "CreateBackupPolicyRequest", + "DeleteBackupPolicyRequest", + "GetBackupPolicyRequest", + "ListBackupPoliciesRequest", + "ListBackupPoliciesResponse", + "UpdateBackupPolicyRequest", + "BackupVault", + "CreateBackupVaultRequest", + "DeleteBackupVaultRequest", + "GetBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "UpdateBackupVaultRequest", "OperationMetadata", "EncryptionType", "ServiceLevel", @@ -149,6 +198,7 @@ "ListStoragePoolsResponse", "StoragePool", "UpdateStoragePoolRequest", + "BackupConfig", "CreateVolumeRequest", "DailySchedule", "DeleteVolumeRequest", diff --git 
a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py index 78e859312100..a01b131351cc 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.2" # {x-release-please-version} +__version__ = "0.3.3" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py index a638dd2acf3e..ee313d7f779e 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py @@ -28,6 +28,33 @@ ListActiveDirectoriesResponse, UpdateActiveDirectoryRequest, ) +from .types.backup import ( + Backup, + CreateBackupRequest, + DeleteBackupRequest, + GetBackupRequest, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from .types.backup_policy import ( + BackupPolicy, + CreateBackupPolicyRequest, + DeleteBackupPolicyRequest, + GetBackupPolicyRequest, + ListBackupPoliciesRequest, + ListBackupPoliciesResponse, + UpdateBackupPolicyRequest, +) +from .types.backup_vault import ( + BackupVault, + CreateBackupVaultRequest, + DeleteBackupVaultRequest, + GetBackupVaultRequest, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + UpdateBackupVaultRequest, +) from .types.cloud_netapp_service import OperationMetadata from .types.common import EncryptionType, ServiceLevel from .types.kms import ( @@ -76,6 +103,7 @@ ) from .types.volume import ( AccessType, + BackupConfig, CreateVolumeRequest, DailySchedule, DeleteVolumeRequest, @@ -103,7 +131,14 @@ "NetAppAsyncClient", "AccessType", "ActiveDirectory", + "Backup", + "BackupConfig", + "BackupPolicy", + "BackupVault", 
"CreateActiveDirectoryRequest", + "CreateBackupPolicyRequest", + "CreateBackupRequest", + "CreateBackupVaultRequest", "CreateKmsConfigRequest", "CreateReplicationRequest", "CreateSnapshotRequest", @@ -111,6 +146,9 @@ "CreateVolumeRequest", "DailySchedule", "DeleteActiveDirectoryRequest", + "DeleteBackupPolicyRequest", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", "DeleteKmsConfigRequest", "DeleteReplicationRequest", "DeleteSnapshotRequest", @@ -121,6 +159,9 @@ "EncryptionType", "ExportPolicy", "GetActiveDirectoryRequest", + "GetBackupPolicyRequest", + "GetBackupRequest", + "GetBackupVaultRequest", "GetKmsConfigRequest", "GetReplicationRequest", "GetSnapshotRequest", @@ -130,6 +171,12 @@ "KmsConfig", "ListActiveDirectoriesRequest", "ListActiveDirectoriesResponse", + "ListBackupPoliciesRequest", + "ListBackupPoliciesResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListBackupsRequest", + "ListBackupsResponse", "ListKmsConfigsRequest", "ListKmsConfigsResponse", "ListReplicationsRequest", @@ -161,6 +208,9 @@ "StoragePool", "TransferStats", "UpdateActiveDirectoryRequest", + "UpdateBackupPolicyRequest", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", "UpdateKmsConfigRequest", "UpdateReplicationRequest", "UpdateSnapshotRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json index 601d3fbd07fc..ca240ed3ab2f 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_metadata.json @@ -15,6 +15,21 @@ "create_active_directory" ] }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupPolicy": { + "methods": [ + "create_backup_policy" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateKmsConfig": { "methods": [ "create_kms_config" @@ -45,6 +60,21 @@ "delete_active_directory" ] }, 
+ "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPolicy": { + "methods": [ + "delete_backup_policy" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteKmsConfig": { "methods": [ "delete_kms_config" @@ -80,6 +110,21 @@ "get_active_directory" ] }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPolicy": { + "methods": [ + "get_backup_policy" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, "GetKmsConfig": { "methods": [ "get_kms_config" @@ -110,6 +155,21 @@ "list_active_directories" ] }, + "ListBackupPolicies": { + "methods": [ + "list_backup_policies" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, "ListKmsConfigs": { "methods": [ "list_kms_configs" @@ -160,6 +220,21 @@ "update_active_directory" ] }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupPolicy": { + "methods": [ + "update_backup_policy" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, "UpdateKmsConfig": { "methods": [ "update_kms_config" @@ -200,6 +275,21 @@ "create_active_directory" ] }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupPolicy": { + "methods": [ + "create_backup_policy" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateKmsConfig": { "methods": [ "create_kms_config" @@ -230,6 +320,21 @@ "delete_active_directory" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPolicy": { + "methods": [ + "delete_backup_policy" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteKmsConfig": { "methods": [ "delete_kms_config" @@ -265,6 +370,21 @@ "get_active_directory" ] }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPolicy": { + "methods": [ + "get_backup_policy" + ] + }, + 
"GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, "GetKmsConfig": { "methods": [ "get_kms_config" @@ -295,6 +415,21 @@ "list_active_directories" ] }, + "ListBackupPolicies": { + "methods": [ + "list_backup_policies" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, "ListKmsConfigs": { "methods": [ "list_kms_configs" @@ -345,6 +480,21 @@ "update_active_directory" ] }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupPolicy": { + "methods": [ + "update_backup_policy" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, "UpdateKmsConfig": { "methods": [ "update_kms_config" @@ -385,6 +535,21 @@ "create_active_directory" ] }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupPolicy": { + "methods": [ + "create_backup_policy" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateKmsConfig": { "methods": [ "create_kms_config" @@ -415,6 +580,21 @@ "delete_active_directory" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPolicy": { + "methods": [ + "delete_backup_policy" + ] + }, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteKmsConfig": { "methods": [ "delete_kms_config" @@ -450,6 +630,21 @@ "get_active_directory" ] }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPolicy": { + "methods": [ + "get_backup_policy" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, "GetKmsConfig": { "methods": [ "get_kms_config" @@ -480,6 +675,21 @@ "list_active_directories" ] }, + "ListBackupPolicies": { + "methods": [ + "list_backup_policies" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, "ListKmsConfigs": { "methods": [ "list_kms_configs" @@ -530,6 +740,21 @@ 
"update_active_directory" ] }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupPolicy": { + "methods": [ + "update_backup_policy" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, "UpdateKmsConfig": { "methods": [ "update_kms_config" diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py index 78e859312100..a01b131351cc 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.2" # {x-release-please-version} +__version__ = "0.3.3" # {x-release-please-version} diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py index dcaab8f3db15..dbf9f017a815 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/async_client.py @@ -53,6 +53,12 @@ from google.cloud.netapp_v1.services.net_app import pagers from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory +from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy +from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from google.cloud.netapp_v1.types import cloud_netapp_service, common, kms from google.cloud.netapp_v1.types import replication from 
google.cloud.netapp_v1.types import replication as gcn_replication @@ -78,6 +84,12 @@ class NetAppAsyncClient: active_directory_path = staticmethod(NetAppClient.active_directory_path) parse_active_directory_path = staticmethod(NetAppClient.parse_active_directory_path) + backup_path = staticmethod(NetAppClient.backup_path) + parse_backup_path = staticmethod(NetAppClient.parse_backup_path) + backup_policy_path = staticmethod(NetAppClient.backup_policy_path) + parse_backup_policy_path = staticmethod(NetAppClient.parse_backup_policy_path) + backup_vault_path = staticmethod(NetAppClient.backup_vault_path) + parse_backup_vault_path = staticmethod(NetAppClient.parse_backup_vault_path) kms_config_path = staticmethod(NetAppClient.kms_config_path) parse_kms_config_path = staticmethod(NetAppClient.parse_kms_config_path) network_path = staticmethod(NetAppClient.network_path) @@ -4632,6 +4644,1920 @@ async def sample_reverse_replication_direction(): # Done; return the response. return response + async def create_backup_vault( + self, + request: Optional[ + Union[gcn_backup_vault.CreateBackupVaultRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[gcn_backup_vault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates new backup vault + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_create_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.CreateBackupVaultRequest, dict]]): + The request object. CreateBackupVaultRequest creates a + backup vault. + parent (:class:`str`): + Required. The location to create the backup vaults, in + the format + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (:class:`google.cloud.netapp_v1.types.BackupVault`): + Required. A backupVault resource + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (:class:`str`): + Required. The ID to use for the backupVault. The ID must + be unique within the specified location. The max + supported length is 63 characters. This value must start + with a lowercase letter followed by up to 62 lowercase + letters, numbers, or hyphens, and cannot end with a + hyphen. Values that do not match this pattern will + trigger an INVALID_ARGUMENT error. + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupVault` A + NetApp BackupVault. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcn_backup_vault.CreateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup_vault, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backup_vault.BackupVault, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_backup_vault( + self, + request: Optional[Union[backup_vault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_vault.BackupVault: + r"""Returns the description of the specified backup vault + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_get_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.GetBackupVaultRequest, dict]]): + The request object. GetBackupVaultRequest gets the state + of a backupVault. + name (:class:`str`): + Required. The backupVault resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.types.BackupVault: + A NetApp BackupVault. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup_vault.GetBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_backup_vaults( + self, + request: Optional[Union[backup_vault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsAsyncPager: + r"""Returns list of all available backup vaults. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_list_backup_vaults(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.ListBackupVaultsRequest, dict]]): + The request object. ListBackupVaultsRequest lists + backupVaults. + parent (:class:`str`): + Required. The location for which to retrieve backupVault + information, in the format + ``projects/{project_id}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListBackupVaultsAsyncPager: + ListBackupVaultsResponse is the + result of ListBackupVaultsRequest. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup_vault.ListBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_backup_vault( + self, + request: Optional[ + Union[gcn_backup_vault.UpdateBackupVaultRequest, dict] + ] = None, + *, + backup_vault: Optional[gcn_backup_vault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a specific backup vault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_update_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.UpdateBackupVaultRequest, dict]]): + The request object. UpdateBackupVaultRequest updates + description and/or labels for a + backupVault. + backup_vault (:class:`google.cloud.netapp_v1.types.BackupVault`): + Required. The backupVault being + updated + + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. 
Field mask is used to specify the fields to be + overwritten in the Backup resource to be updated. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupVault` A + NetApp BackupVault. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcn_backup_vault.UpdateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup_vault, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backup_vault.BackupVault, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup_vault( + self, + request: Optional[Union[backup_vault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Warning! This operation will permanently delete the + backup vault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_delete_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.DeleteBackupVaultRequest, dict]]): + The request object. DeleteBackupVaultRequest deletes a + backupVault. + name (:class:`str`): + Required. The backupVault resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup_vault.DeleteBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup_vault, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup( + self, + request: Optional[Union[gcn_backup.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[gcn_backup.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a backup from the volume specified in the + request The backup can be created from the given + snapshot if specified in the request. 
If no snapshot + specified, there'll be a new snapshot taken to initiate + the backup creation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_create_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.CreateBackupRequest, dict]]): + The request object. CreateBackupRequest creates a backup. + parent (:class:`str`): + Required. The NetApp backupVault to create the backups + of, in the format + ``projects/*/locations/*/backupVaults/{backup_vault_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.netapp_v1.types.Backup`): + Required. A backup resource + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The ID to use for the backup. The ID must be + unique within the specified backupVault. This value must + start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens, and cannot end + with a hyphen. 
Values that do not match this pattern + will trigger an INVALID_ARGUMENT error. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.Backup` A NetApp + Backup. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcn_backup.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backup.Backup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[backup.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup.Backup: + r"""Returns the description of the specified backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_get_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.GetBackupRequest, dict]]): + The request object. GetBackupRequest gets the state of a + backup. + name (:class:`str`): + Required. The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.types.Backup: + A NetApp Backup. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_backups( + self, + request: Optional[Union[backup.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Returns descriptions of all backups for a + backupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_list_backups(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.ListBackupsRequest, dict]]): + The request object. ListBackupsRequest lists backups. + parent (:class:`str`): + Required. The backupVault for which to retrieve backup + information, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}``. + To retrieve backup information for all locations, use + "-" for the ``{location}`` value. To retrieve backup + information for all backupVaults, use "-" for the + ``{backup_vault_id}`` value. To retrieve backup + information for a volume, use "-" for the + ``{backup_vault_id}`` value and specify volume full name + with the filter. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListBackupsAsyncPager: + ListBackupsResponse is the result of + ListBackupsRequest. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup( + self, + request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Warning! This operation will permanently delete the + backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_delete_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.DeleteBackupRequest, dict]]): + The request object. DeleteBackupRequest deletes a backup. + name (:class:`str`): + Required. 
The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_backup( + self, + request: Optional[Union[gcn_backup.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[gcn_backup.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update backup with full spec. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_update_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.UpdateBackupRequest, dict]]): + The request object. UpdateBackupRequest updates + description and/or labels for a backup. 
+ backup (:class:`google.cloud.netapp_v1.types.Backup`): + Required. The backup being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource to be updated. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.Backup` A NetApp + Backup. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcn_backup.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backup.Backup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup_policy( + self, + request: Optional[ + Union[gcn_backup_policy.CreateBackupPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_policy: Optional[gcn_backup_policy.BackupPolicy] = None, + backup_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates new backup policy + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_create_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupPolicyRequest( + parent="parent_value", + backup_policy_id="backup_policy_id_value", + ) + + # Make the request + operation = client.create_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.CreateBackupPolicyRequest, dict]]): + The request object. CreateBackupPolicyRequest creates a + backupPolicy. + parent (:class:`str`): + Required. The location to create the backup policies of, + in the format + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_policy (:class:`google.cloud.netapp_v1.types.BackupPolicy`): + Required. A backupPolicy resource + This corresponds to the ``backup_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_policy_id (:class:`str`): + Required. The ID to use for the + backup policy. The ID must be unique + within the specified location. This + value must start with a lowercase letter + followed by up to 62 lowercase letters, + numbers, or hyphens, and cannot end with + a hyphen. + + This corresponds to the ``backup_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupPolicy` + Backup Policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_policy, backup_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcn_backup_policy.CreateBackupPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_policy is not None: + request.backup_policy = backup_policy + if backup_policy_id is not None: + request.backup_policy_id = backup_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup_policy, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backup_policy.BackupPolicy, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_backup_policy( + self, + request: Optional[Union[backup_policy.GetBackupPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_policy.BackupPolicy: + r"""Returns the description of the specified backup policy by + backup_policy_id. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_get_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.GetBackupPolicyRequest, dict]]): + The request object. GetBackupPolicyRequest gets the state + of a backupPolicy. + name (:class:`str`): + Required. The backupPolicy resource name, in the format + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.netapp_v1.types.BackupPolicy: + Backup Policy. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup_policy.GetBackupPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_policies( + self, + request: Optional[Union[backup_policy.ListBackupPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPoliciesAsyncPager: + r"""Returns list of all available backup policies. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_list_backup_policies(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.ListBackupPoliciesRequest, dict]]): + The request object. ListBackupPoliciesRequest for + requesting multiple backup policies. + parent (:class:`str`): + Required. Parent value for + ListBackupPoliciesRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListBackupPoliciesAsyncPager: + ListBackupPoliciesResponse contains + all the backup policies requested. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup_policy.ListBackupPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backup_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_policy( + self, + request: Optional[ + Union[gcn_backup_policy.UpdateBackupPolicyRequest, dict] + ] = None, + *, + backup_policy: Optional[gcn_backup_policy.BackupPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates settings of a specific backup policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_update_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupPolicyRequest( + ) + + # Make the request + operation = client.update_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.UpdateBackupPolicyRequest, dict]]): + The request object. UpdateBackupPolicyRequest for + updating a backup policy. + backup_policy (:class:`google.cloud.netapp_v1.types.BackupPolicy`): + Required. The backup policy being + updated + + This corresponds to the ``backup_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Backup Policy resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupPolicy` + Backup Policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcn_backup_policy.UpdateBackupPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_policy is not None: + request.backup_policy = backup_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup_policy, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_policy.name", request.backup_policy.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backup_policy.BackupPolicy, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_backup_policy( + self, + request: Optional[Union[backup_policy.DeleteBackupPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Warning! This operation will permanently delete the + backup policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + async def sample_delete_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.netapp_v1.types.DeleteBackupPolicyRequest, dict]]): + The request object. DeleteBackupPolicyRequest deletes a + backup policy. + name (:class:`str`): + Required. The backup policy resource name, in the format + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = backup_policy.DeleteBackupPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup_policy, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py index 223bbfc2db74..05e7a2c3c567 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/client.py @@ -57,6 +57,12 @@ from google.cloud.netapp_v1.services.net_app import pagers from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory +from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy +from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from google.cloud.netapp_v1.types import cloud_netapp_service, common, kms from google.cloud.netapp_v1.types import replication from google.cloud.netapp_v1.types import replication as gcn_replication @@ -215,6 +221,74 @@ def parse_active_directory_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def backup_path( + project: str, + location: str, + backup_vault: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backupVaults/{backup_vault}/backups/{backup}".format( + project=project, + location=location, + backup_vault=backup_vault, + backup=backup, + ) + + @staticmethod 
+ def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_policy_path( + project: str, + location: str, + backup_policy: str, + ) -> str: + """Returns a fully-qualified backup_policy string.""" + return "projects/{project}/locations/{location}/backupPolicies/{backup_policy}".format( + project=project, + location=location, + backup_policy=backup_policy, + ) + + @staticmethod + def parse_backup_policy_path(path: str) -> Dict[str, str]: + """Parses a backup_policy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPolicies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_vault_path( + project: str, + location: str, + backup_vault: str, + ) -> str: + """Returns a fully-qualified backup_vault string.""" + return "projects/{project}/locations/{location}/backupVaults/{backup_vault}".format( + project=project, + location=location, + backup_vault=backup_vault, + ) + + @staticmethod + def parse_backup_vault_path(path: str) -> Dict[str, str]: + """Parses a backup_vault path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def kms_config_path( project: str, @@ -4890,6 +4964,1866 @@ def sample_reverse_replication_direction(): # Done; return the response. 
return response + def create_backup_vault( + self, + request: Optional[ + Union[gcn_backup_vault.CreateBackupVaultRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[gcn_backup_vault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates new backup vault + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_create_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.CreateBackupVaultRequest, dict]): + The request object. CreateBackupVaultRequest creates a + backup vault. + parent (str): + Required. The location to create the backup vaults, in + the format + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (google.cloud.netapp_v1.types.BackupVault): + Required. 
A backupVault resource + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (str): + Required. The ID to use for the backupVault. The ID must + be unique within the specified location. The max + supported length is 63 characters. This value must start + with a lowercase letter followed by up to 62 lowercase + letters, numbers, or hyphens, and cannot end with a + hyphen. Values that do not match this pattern will + trigger an INVALID_ARGUMENT error. + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupVault` A + NetApp BackupVault. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcn_backup_vault.CreateBackupVaultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, gcn_backup_vault.CreateBackupVaultRequest): + request = gcn_backup_vault.CreateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backup_vault.BackupVault, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_vault( + self, + request: Optional[Union[backup_vault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_vault.BackupVault: + r"""Returns the description of the specified backup vault + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_get_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.GetBackupVaultRequest, dict]): + The request object. GetBackupVaultRequest gets the state + of a backupVault. + name (str): + Required. The backupVault resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.types.BackupVault: + A NetApp BackupVault. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup_vault.GetBackupVaultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, backup_vault.GetBackupVaultRequest): + request = backup_vault.GetBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_vaults( + self, + request: Optional[Union[backup_vault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsPager: + r"""Returns list of all available backup vaults. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_list_backup_vaults(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.ListBackupVaultsRequest, dict]): + The request object. ListBackupVaultsRequest lists + backupVaults. + parent (str): + Required. The location for which to retrieve backupVault + information, in the format + ``projects/{project_id}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListBackupVaultsPager: + ListBackupVaultsResponse is the + result of ListBackupVaultsRequest. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup_vault.ListBackupVaultsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup_vault.ListBackupVaultsRequest): + request = backup_vault.ListBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_vaults] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupVaultsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup_vault( + self, + request: Optional[ + Union[gcn_backup_vault.UpdateBackupVaultRequest, dict] + ] = None, + *, + backup_vault: Optional[gcn_backup_vault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a specific backup vault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_update_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.UpdateBackupVaultRequest, dict]): + The request object. UpdateBackupVaultRequest updates + description and/or labels for a + backupVault. + backup_vault (google.cloud.netapp_v1.types.BackupVault): + Required. The backupVault being + updated + + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource to be updated. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupVault` A + NetApp BackupVault. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcn_backup_vault.UpdateBackupVaultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcn_backup_vault.UpdateBackupVaultRequest): + request = gcn_backup_vault.UpdateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backup_vault.BackupVault, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_backup_vault( + self, + request: Optional[Union[backup_vault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Warning! This operation will permanently delete the + backup vault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_delete_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.DeleteBackupVaultRequest, dict]): + The request object. DeleteBackupVaultRequest deletes a + backupVault. + name (str): + Required. The backupVault resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup_vault.DeleteBackupVaultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup_vault.DeleteBackupVaultRequest): + request = backup_vault.DeleteBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup( + self, + request: Optional[Union[gcn_backup.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[gcn_backup.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a backup from the volume specified in the + request The backup can be created from the given + snapshot if specified in the request. If no snapshot + specified, there'll be a new snapshot taken to initiate + the backup creation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_create_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.CreateBackupRequest, dict]): + The request object. CreateBackupRequest creates a backup. + parent (str): + Required. 
The NetApp backupVault to create the backups + of, in the format + ``projects/*/locations/*/backupVaults/{backup_vault_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (google.cloud.netapp_v1.types.Backup): + Required. A backup resource + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (str): + Required. The ID to use for the backup. The ID must be + unique within the specified backupVault. This value must + start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens, and cannot end + with a hyphen. Values that do not match this pattern + will trigger an INVALID_ARGUMENT error. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.Backup` A NetApp + Backup. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcn_backup.CreateBackupRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcn_backup.CreateBackupRequest): + request = gcn_backup.CreateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backup.Backup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: Optional[Union[backup.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup.Backup: + r"""Returns the description of the specified backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_get_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.GetBackupRequest, dict]): + The request object. GetBackupRequest gets the state of a + backup. + name (str): + Required. The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.types.Backup: + A NetApp Backup. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup.GetBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, backup.GetBackupRequest): + request = backup.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backups( + self, + request: Optional[Union[backup.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Returns descriptions of all backups for a + backupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_list_backups(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.ListBackupsRequest, dict]): + The request object. ListBackupsRequest lists backups. + parent (str): + Required. The backupVault for which to retrieve backup + information, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}``. + To retrieve backup information for all locations, use + "-" for the ``{location}`` value. To retrieve backup + information for all backupVaults, use "-" for the + ``{backup_vault_id}`` value. To retrieve backup + information for a volume, use "-" for the + ``{backup_vault_id}`` value and specify volume full name + with the filter. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListBackupsPager: + ListBackupsResponse is the result of + ListBackupsRequest. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup.ListBackupsRequest): + request = backup.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup( + self, + request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Warning! This operation will permanently delete the + backup. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_delete_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.DeleteBackupRequest, dict]): + The request object. DeleteBackupRequest deletes a backup. + name (str): + Required. The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup.DeleteBackupRequest): + request = backup.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_backup( + self, + request: Optional[Union[gcn_backup.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[gcn_backup.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update backup with full spec. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_update_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.UpdateBackupRequest, dict]): + The request object. UpdateBackupRequest updates + description and/or labels for a backup. + backup (google.cloud.netapp_v1.types.Backup): + Required. The backup being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource to be updated. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. 
If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.Backup` A NetApp + Backup. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcn_backup.UpdateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcn_backup.UpdateBackupRequest): + request = gcn_backup.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backup.Backup, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_policy( + self, + request: Optional[ + Union[gcn_backup_policy.CreateBackupPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_policy: Optional[gcn_backup_policy.BackupPolicy] = None, + backup_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates new backup policy + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_create_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupPolicyRequest( + parent="parent_value", + backup_policy_id="backup_policy_id_value", + ) + + # Make the request + operation = client.create_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.CreateBackupPolicyRequest, dict]): + The request object. CreateBackupPolicyRequest creates a + backupPolicy. + parent (str): + Required. The location to create the backup policies of, + in the format + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_policy (google.cloud.netapp_v1.types.BackupPolicy): + Required. A backupPolicy resource + This corresponds to the ``backup_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_policy_id (str): + Required. The ID to use for the + backup policy. The ID must be unique + within the specified location. This + value must start with a lowercase letter + followed by up to 62 lowercase letters, + numbers, or hyphens, and cannot end with + a hyphen. + + This corresponds to the ``backup_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupPolicy` + Backup Policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_policy, backup_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcn_backup_policy.CreateBackupPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcn_backup_policy.CreateBackupPolicyRequest): + request = gcn_backup_policy.CreateBackupPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_policy is not None: + request.backup_policy = backup_policy + if backup_policy_id is not None: + request.backup_policy_id = backup_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backup_policy.BackupPolicy, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_policy( + self, + request: Optional[Union[backup_policy.GetBackupPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_policy.BackupPolicy: + r"""Returns the description of the specified backup policy by + backup_policy_id. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_get_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.GetBackupPolicyRequest, dict]): + The request object. GetBackupPolicyRequest gets the state + of a backupPolicy. + name (str): + Required. The backupPolicy resource name, in the format + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.netapp_v1.types.BackupPolicy: + Backup Policy. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup_policy.GetBackupPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup_policy.GetBackupPolicyRequest): + request = backup_policy.GetBackupPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_policies( + self, + request: Optional[Union[backup_policy.ListBackupPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPoliciesPager: + r"""Returns list of all available backup policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_list_backup_policies(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.ListBackupPoliciesRequest, dict]): + The request object. ListBackupPoliciesRequest for + requesting multiple backup policies. + parent (str): + Required. Parent value for + ListBackupPoliciesRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.netapp_v1.services.net_app.pagers.ListBackupPoliciesPager: + ListBackupPoliciesResponse contains + all the backup policies requested. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup_policy.ListBackupPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup_policy.ListBackupPoliciesRequest): + request = backup_policy.ListBackupPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_backup_policy( + self, + request: Optional[ + Union[gcn_backup_policy.UpdateBackupPolicyRequest, dict] + ] = None, + *, + backup_policy: Optional[gcn_backup_policy.BackupPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates settings of a specific backup policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_update_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupPolicyRequest( + ) + + # Make the request + operation = client.update_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.UpdateBackupPolicyRequest, dict]): + The request object. UpdateBackupPolicyRequest for + updating a backup policy. + backup_policy (google.cloud.netapp_v1.types.BackupPolicy): + Required. The backup policy being + updated + + This corresponds to the ``backup_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup Policy resource by the update. 
+ The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.netapp_v1.types.BackupPolicy` + Backup Policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcn_backup_policy.UpdateBackupPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcn_backup_policy.UpdateBackupPolicyRequest): + request = gcn_backup_policy.UpdateBackupPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_policy is not None: + request.backup_policy = backup_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_backup_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_policy.name", request.backup_policy.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backup_policy.BackupPolicy, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup_policy( + self, + request: Optional[Union[backup_policy.DeleteBackupPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Warning! This operation will permanently delete the + backup policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import netapp_v1 + + def sample_delete_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.netapp_v1.types.DeleteBackupPolicyRequest, dict]): + The request object. DeleteBackupPolicyRequest deletes a + backup policy. + name (str): + Required. The backup policy resource name, in the format + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a backup_policy.DeleteBackupPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup_policy.DeleteBackupPolicyRequest): + request = backup_policy.DeleteBackupPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_netapp_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "NetAppClient": return self diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py index 5365e6694820..4825c7aac348 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/pagers.py @@ -26,6 +26,9 @@ from google.cloud.netapp_v1.types import ( active_directory, + backup, + backup_policy, + backup_vault, kms, replication, snapshot, @@ -804,3 +807,387 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backup_vault.ListBackupVaultsResponse], + request: backup_vault.ListBackupVaultsRequest, + response: backup_vault.ListBackupVaultsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListBackupVaultsRequest): + The initial request object. 
+ response (google.cloud.netapp_v1.types.ListBackupVaultsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup_vault.ListBackupVaultsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup_vault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup_vault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsAsyncPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backup_vault.ListBackupVaultsResponse]], + request: backup_vault.ListBackupVaultsRequest, + response: backup_vault.ListBackupVaultsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.netapp_v1.types.ListBackupVaultsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup_vault.ListBackupVaultsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup_vault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[backup_vault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., backup.ListBackupsResponse], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.netapp_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListBackupsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backup.ListBackupsResponse]], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.netapp_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[backup.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPoliciesPager: + """A pager for iterating through ``list_backup_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListBackupPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_policies`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPolicies`` requests and continue to iterate + through the ``backup_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListBackupPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backup_policy.ListBackupPoliciesResponse], + request: backup_policy.ListBackupPoliciesRequest, + response: backup_policy.ListBackupPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.netapp_v1.types.ListBackupPoliciesRequest): + The initial request object. + response (google.cloud.netapp_v1.types.ListBackupPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backup_policy.ListBackupPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup_policy.ListBackupPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup_policy.BackupPolicy]: + for page in self.pages: + yield from page.backup_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPoliciesAsyncPager: + """A pager for iterating through ``list_backup_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.netapp_v1.types.ListBackupPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPolicies`` requests and continue to iterate + through the ``backup_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.netapp_v1.types.ListBackupPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backup_policy.ListBackupPoliciesResponse]], + request: backup_policy.ListBackupPoliciesRequest, + response: backup_policy.ListBackupPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.netapp_v1.types.ListBackupPoliciesRequest): + The initial request object. + response (google.cloud.netapp_v1.types.ListBackupPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup_policy.ListBackupPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup_policy.ListBackupPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[backup_policy.BackupPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py index 343b6735bad4..5da5abb8af02 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/base.py @@ -29,6 +29,12 @@ from google.cloud.netapp_v1 import gapic_version as package_version from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory +from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy 
+from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import replication from google.cloud.netapp_v1.types import replication as gcn_replication @@ -422,6 +428,135 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_backup_vault: gapic_v1.method.wrap_method( + self.create_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method.wrap_method( + self.get_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method.wrap_method( + self.list_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method.wrap_method( + self.delete_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + 
self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_policy: gapic_v1.method.wrap_method( + self.create_backup_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_policy: gapic_v1.method.wrap_method( + self.get_backup_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_policies: gapic_v1.method.wrap_method( + self.list_backup_policies, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_policy: gapic_v1.method.wrap_method( + self.update_backup_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_policy: gapic_v1.method.wrap_method( + self.delete_backup_policy, + default_timeout=60.0, + client_info=client_info, + ), } def close(self): @@ -774,6 +909,146 @@ def reverse_replication_direction( ]: raise NotImplementedError() + @property + def create_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.CreateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def 
get_backup_vault( + self, + ) -> Callable[ + [backup_vault.GetBackupVaultRequest], + Union[backup_vault.BackupVault, Awaitable[backup_vault.BackupVault]], + ]: + raise NotImplementedError() + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backup_vault.ListBackupVaultsRequest], + Union[ + backup_vault.ListBackupVaultsResponse, + Awaitable[backup_vault.ListBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.UpdateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backup_vault.DeleteBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup( + self, + ) -> Callable[ + [gcn_backup.CreateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [backup.GetBackupRequest], Union[backup.Backup, Awaitable[backup.Backup]] + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [backup.ListBackupsRequest], + Union[backup.ListBackupsResponse, Awaitable[backup.ListBackupsResponse]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [backup.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [gcn_backup.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.CreateBackupPolicyRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_policy( + self, + ) -> Callable[ + [backup_policy.GetBackupPolicyRequest], + Union[backup_policy.BackupPolicy, Awaitable[backup_policy.BackupPolicy]], + ]: + raise NotImplementedError() + + @property + def list_backup_policies( + self, + ) -> Callable[ + [backup_policy.ListBackupPoliciesRequest], + Union[ + backup_policy.ListBackupPoliciesResponse, + Awaitable[backup_policy.ListBackupPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.UpdateBackupPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup_policy( + self, + ) -> Callable[ + [backup_policy.DeleteBackupPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py index e4a17878af36..20258f493c85 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc.py @@ -26,6 +26,12 @@ from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory +from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy +from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from 
google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import replication from google.cloud.netapp_v1.types import replication as gcn_replication @@ -1223,6 +1229,416 @@ def reverse_replication_direction( ) return self._stubs["reverse_replication_direction"] + @property + def create_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.CreateBackupVaultRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates new backup vault + + Returns: + Callable[[~.CreateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateBackupVault", + request_serializer=gcn_backup_vault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def get_backup_vault( + self, + ) -> Callable[[backup_vault.GetBackupVaultRequest], backup_vault.BackupVault]: + r"""Return a callable for the get backup vault method over gRPC. + + Returns the description of the specified backup vault + + Returns: + Callable[[~.GetBackupVaultRequest], + ~.BackupVault]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetBackupVault", + request_serializer=backup_vault.GetBackupVaultRequest.serialize, + response_deserializer=backup_vault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backup_vault.ListBackupVaultsRequest], backup_vault.ListBackupVaultsResponse + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Returns list of all available backup vaults. + + Returns: + Callable[[~.ListBackupVaultsRequest], + ~.ListBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListBackupVaults", + request_serializer=backup_vault.ListBackupVaultsRequest.serialize, + response_deserializer=backup_vault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def update_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.UpdateBackupVaultRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a specific backup vault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateBackupVault", + request_serializer=gcn_backup_vault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[[backup_vault.DeleteBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup vault method over gRPC. + + Warning! This operation will permanently delete the + backup vault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteBackupVault", + request_serializer=backup_vault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def create_backup( + self, + ) -> Callable[[gcn_backup.CreateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup method over gRPC. + + Creates a backup from the volume specified in the + request The backup can be created from the given + snapshot if specified in the request. If no snapshot + specified, there'll be a new snapshot taken to initiate + the backup creation. + + Returns: + Callable[[~.CreateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateBackup", + request_serializer=gcn_backup.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def get_backup(self) -> Callable[[backup.GetBackupRequest], backup.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Returns the description of the specified backup + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetBackup", + request_serializer=backup.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def list_backups( + self, + ) -> Callable[[backup.ListBackupsRequest], backup.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Returns descriptions of all backups for a + backupVault. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListBackups", + request_serializer=backup.ListBackupsRequest.serialize, + response_deserializer=backup.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def delete_backup( + self, + ) -> Callable[[backup.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Warning! This operation will permanently delete the + backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteBackup", + request_serializer=backup.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def update_backup( + self, + ) -> Callable[[gcn_backup.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Update backup with full spec. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateBackup", + request_serializer=gcn_backup.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def create_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.CreateBackupPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create backup policy method over gRPC. + + Creates new backup policy + + Returns: + Callable[[~.CreateBackupPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_policy" not in self._stubs: + self._stubs["create_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateBackupPolicy", + request_serializer=gcn_backup_policy.CreateBackupPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_policy"] + + @property + def get_backup_policy( + self, + ) -> Callable[[backup_policy.GetBackupPolicyRequest], backup_policy.BackupPolicy]: + r"""Return a callable for the get backup policy method over gRPC. + + Returns the description of the specified backup policy by + backup_policy_id. + + Returns: + Callable[[~.GetBackupPolicyRequest], + ~.BackupPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_policy" not in self._stubs: + self._stubs["get_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetBackupPolicy", + request_serializer=backup_policy.GetBackupPolicyRequest.serialize, + response_deserializer=backup_policy.BackupPolicy.deserialize, + ) + return self._stubs["get_backup_policy"] + + @property + def list_backup_policies( + self, + ) -> Callable[ + [backup_policy.ListBackupPoliciesRequest], + backup_policy.ListBackupPoliciesResponse, + ]: + r"""Return a callable for the list backup policies method over gRPC. + + Returns list of all available backup policies. + + Returns: + Callable[[~.ListBackupPoliciesRequest], + ~.ListBackupPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_policies" not in self._stubs: + self._stubs["list_backup_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListBackupPolicies", + request_serializer=backup_policy.ListBackupPoliciesRequest.serialize, + response_deserializer=backup_policy.ListBackupPoliciesResponse.deserialize, + ) + return self._stubs["list_backup_policies"] + + @property + def update_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.UpdateBackupPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update backup policy method over gRPC. + + Updates settings of a specific backup policy. + + Returns: + Callable[[~.UpdateBackupPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup_policy" not in self._stubs: + self._stubs["update_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateBackupPolicy", + request_serializer=gcn_backup_policy.UpdateBackupPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_policy"] + + @property + def delete_backup_policy( + self, + ) -> Callable[[backup_policy.DeleteBackupPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup policy method over gRPC. + + Warning! This operation will permanently delete the + backup policy. + + Returns: + Callable[[~.DeleteBackupPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_policy" not in self._stubs: + self._stubs["delete_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteBackupPolicy", + request_serializer=backup_policy.DeleteBackupPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_policy"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py index 987d73ec6565..a6769dbdd171 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/grpc_asyncio.py @@ -26,6 +26,12 @@ from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory 
+from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy +from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import replication from google.cloud.netapp_v1.types import replication as gcn_replication @@ -1267,6 +1273,433 @@ def reverse_replication_direction( ) return self._stubs["reverse_replication_direction"] + @property + def create_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.CreateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates new backup vault + + Returns: + Callable[[~.CreateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateBackupVault", + request_serializer=gcn_backup_vault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backup_vault.GetBackupVaultRequest], Awaitable[backup_vault.BackupVault] + ]: + r"""Return a callable for the get backup vault method over gRPC. 
+ + Returns the description of the specified backup vault + + Returns: + Callable[[~.GetBackupVaultRequest], + Awaitable[~.BackupVault]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetBackupVault", + request_serializer=backup_vault.GetBackupVaultRequest.serialize, + response_deserializer=backup_vault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backup_vault.ListBackupVaultsRequest], + Awaitable[backup_vault.ListBackupVaultsResponse], + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Returns list of all available backup vaults. + + Returns: + Callable[[~.ListBackupVaultsRequest], + Awaitable[~.ListBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListBackupVaults", + request_serializer=backup_vault.ListBackupVaultsRequest.serialize, + response_deserializer=backup_vault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def update_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.UpdateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup vault method over gRPC. 
+ + Updates the settings of a specific backup vault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateBackupVault", + request_serializer=gcn_backup_vault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backup_vault.DeleteBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup vault method over gRPC. + + Warning! This operation will permanently delete the + backup vault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteBackupVault", + request_serializer=backup_vault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def create_backup( + self, + ) -> Callable[ + [gcn_backup.CreateBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup method over gRPC. 
+ + Creates a backup from the volume specified in the + request The backup can be created from the given + snapshot if specified in the request. If no snapshot + specified, there'll be a new snapshot taken to initiate + the backup creation. + + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateBackup", + request_serializer=gcn_backup.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def get_backup( + self, + ) -> Callable[[backup.GetBackupRequest], Awaitable[backup.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Returns the description of the specified backup + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetBackup", + request_serializer=backup.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def list_backups( + self, + ) -> Callable[[backup.ListBackupsRequest], Awaitable[backup.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. 
+ + Returns descriptions of all backups for a + backupVault. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListBackups", + request_serializer=backup.ListBackupsRequest.serialize, + response_deserializer=backup.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def delete_backup( + self, + ) -> Callable[[backup.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete backup method over gRPC. + + Warning! This operation will permanently delete the + backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteBackup", + request_serializer=backup.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def update_backup( + self, + ) -> Callable[ + [gcn_backup.UpdateBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup method over gRPC. + + Update backup with full spec. 
+ + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateBackup", + request_serializer=gcn_backup.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def create_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.CreateBackupPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create backup policy method over gRPC. + + Creates new backup policy + + Returns: + Callable[[~.CreateBackupPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_policy" not in self._stubs: + self._stubs["create_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/CreateBackupPolicy", + request_serializer=gcn_backup_policy.CreateBackupPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_policy"] + + @property + def get_backup_policy( + self, + ) -> Callable[ + [backup_policy.GetBackupPolicyRequest], Awaitable[backup_policy.BackupPolicy] + ]: + r"""Return a callable for the get backup policy method over gRPC. + + Returns the description of the specified backup policy by + backup_policy_id. 
+ + Returns: + Callable[[~.GetBackupPolicyRequest], + Awaitable[~.BackupPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_policy" not in self._stubs: + self._stubs["get_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/GetBackupPolicy", + request_serializer=backup_policy.GetBackupPolicyRequest.serialize, + response_deserializer=backup_policy.BackupPolicy.deserialize, + ) + return self._stubs["get_backup_policy"] + + @property + def list_backup_policies( + self, + ) -> Callable[ + [backup_policy.ListBackupPoliciesRequest], + Awaitable[backup_policy.ListBackupPoliciesResponse], + ]: + r"""Return a callable for the list backup policies method over gRPC. + + Returns list of all available backup policies. + + Returns: + Callable[[~.ListBackupPoliciesRequest], + Awaitable[~.ListBackupPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_policies" not in self._stubs: + self._stubs["list_backup_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/ListBackupPolicies", + request_serializer=backup_policy.ListBackupPoliciesRequest.serialize, + response_deserializer=backup_policy.ListBackupPoliciesResponse.deserialize, + ) + return self._stubs["list_backup_policies"] + + @property + def update_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.UpdateBackupPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update backup policy method over gRPC. 
+ + Updates settings of a specific backup policy. + + Returns: + Callable[[~.UpdateBackupPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_policy" not in self._stubs: + self._stubs["update_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/UpdateBackupPolicy", + request_serializer=gcn_backup_policy.UpdateBackupPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_policy"] + + @property + def delete_backup_policy( + self, + ) -> Callable[ + [backup_policy.DeleteBackupPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup policy method over gRPC. + + Warning! This operation will permanently delete the + backup policy. + + Returns: + Callable[[~.DeleteBackupPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_policy" not in self._stubs: + self._stubs["delete_backup_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.netapp.v1.NetApp/DeleteBackupPolicy", + request_serializer=backup_policy.DeleteBackupPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_policy"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py index 3931ccd2f590..b55ca57df93c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/services/net_app/transports/rest.py @@ -47,6 +47,12 @@ from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory +from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy +from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from google.cloud.netapp_v1.types import kms from google.cloud.netapp_v1.types import replication from google.cloud.netapp_v1.types import replication as gcn_replication @@ -90,6 +96,30 @@ def post_create_active_directory(self, response): logging.log(f"Received response: {response}") return response + def pre_create_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_policy(self, request, metadata): + logging.log(f"Received request: {request}") 
+ return request, metadata + + def post_create_backup_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -138,6 +168,30 @@ def post_delete_active_directory(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -194,6 +248,30 @@ def post_get_active_directory(self, response): logging.log(f"Received response: {response}") return response + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_get_backup_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -242,6 +320,30 @@ def post_list_active_directories(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_kms_configs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -322,6 +424,30 @@ def post_update_active_directory(self, response): logging.log(f"Received response: {response}") return response + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_policy(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_update_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_kms_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -401,6 +527,75 @@ def post_create_active_directory( """ return response + def pre_create_backup( + self, + request: gcn_backup.CreateBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcn_backup.CreateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_create_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_create_backup_policy( + self, + request: gcn_backup_policy.CreateBackupPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcn_backup_policy.CreateBackupPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_create_backup_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup_policy + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. 
+ """ + return response + + def pre_create_backup_vault( + self, + request: gcn_backup_vault.CreateBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcn_backup_vault.CreateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_create_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + def pre_create_kms_config( self, request: kms.CreateKmsConfigRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[kms.CreateKmsConfigRequest, Sequence[Tuple[str, str]]]: @@ -539,6 +734,73 @@ def post_delete_active_directory( """ return response + def pre_delete_backup( + self, request: backup.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backup.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_backup_policy( + self, + request: backup_policy.DeleteBackupPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_policy.DeleteBackupPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_delete_backup_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_policy + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_delete_backup_vault( + self, + request: backup_vault.DeleteBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_vault.DeleteBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_delete_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. 
+ """ + return response + def pre_delete_kms_config( self, request: kms.DeleteKmsConfigRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[kms.DeleteKmsConfigRequest, Sequence[Tuple[str, str]]]: @@ -694,6 +956,71 @@ def post_get_active_directory( """ return response + def pre_get_backup( + self, request: backup.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backup.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_get_backup(self, response: backup.Backup) -> backup.Backup: + """Post-rpc interceptor for get_backup + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_get_backup_policy( + self, + request: backup_policy.GetBackupPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_policy.GetBackupPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_get_backup_policy( + self, response: backup_policy.BackupPolicy + ) -> backup_policy.BackupPolicy: + """Post-rpc interceptor for get_backup_policy + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_get_backup_vault( + self, + request: backup_vault.GetBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_vault.GetBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. 
+ """ + return request, metadata + + def post_get_backup_vault( + self, response: backup_vault.BackupVault + ) -> backup_vault.BackupVault: + """Post-rpc interceptor for get_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + def pre_get_kms_config( self, request: kms.GetKmsConfigRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[kms.GetKmsConfigRequest, Sequence[Tuple[str, str]]]: @@ -822,20 +1149,22 @@ def post_list_active_directories( """ return response - def pre_list_kms_configs( - self, request: kms.ListKmsConfigsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[kms.ListKmsConfigsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_kms_configs + def pre_list_backup_policies( + self, + request: backup_policy.ListBackupPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_policy.ListBackupPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_policies Override in a subclass to manipulate the request or metadata before they are sent to the NetApp server. 
""" return request, metadata - def post_list_kms_configs( - self, response: kms.ListKmsConfigsResponse - ) -> kms.ListKmsConfigsResponse: - """Post-rpc interceptor for list_kms_configs + def post_list_backup_policies( + self, response: backup_policy.ListBackupPoliciesResponse + ) -> backup_policy.ListBackupPoliciesResponse: + """Post-rpc interceptor for list_backup_policies Override in a subclass to manipulate the response after it is returned by the NetApp server but before @@ -843,22 +1172,20 @@ def post_list_kms_configs( """ return response - def pre_list_replications( - self, - request: replication.ListReplicationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[replication.ListReplicationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_replications + def pre_list_backups( + self, request: backup.ListBackupsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backup.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups Override in a subclass to manipulate the request or metadata before they are sent to the NetApp server. 
""" return request, metadata - def post_list_replications( - self, response: replication.ListReplicationsResponse - ) -> replication.ListReplicationsResponse: - """Post-rpc interceptor for list_replications + def post_list_backups( + self, response: backup.ListBackupsResponse + ) -> backup.ListBackupsResponse: + """Post-rpc interceptor for list_backups Override in a subclass to manipulate the response after it is returned by the NetApp server but before @@ -866,11 +1193,78 @@ def post_list_replications( """ return response - def pre_list_snapshots( + def pre_list_backup_vaults( self, - request: snapshot.ListSnapshotsRequest, + request: backup_vault.ListBackupVaultsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[snapshot.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: + ) -> Tuple[backup_vault.ListBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_list_backup_vaults( + self, response: backup_vault.ListBackupVaultsResponse + ) -> backup_vault.ListBackupVaultsResponse: + """Post-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_list_kms_configs( + self, request: kms.ListKmsConfigsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[kms.ListKmsConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_kms_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. 
+ """ + return request, metadata + + def post_list_kms_configs( + self, response: kms.ListKmsConfigsResponse + ) -> kms.ListKmsConfigsResponse: + """Post-rpc interceptor for list_kms_configs + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_list_replications( + self, + request: replication.ListReplicationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[replication.ListReplicationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_replications + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_list_replications( + self, response: replication.ListReplicationsResponse + ) -> replication.ListReplicationsResponse: + """Post-rpc interceptor for list_replications + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_list_snapshots( + self, + request: snapshot.ListSnapshotsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[snapshot.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_snapshots Override in a subclass to manipulate the request or metadata @@ -1050,6 +1444,75 @@ def post_update_active_directory( """ return response + def pre_update_backup( + self, + request: gcn_backup.UpdateBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcn_backup.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. 
+ """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_update_backup_policy( + self, + request: gcn_backup_policy.UpdateBackupPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcn_backup_policy.UpdateBackupPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_update_backup_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_policy + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. + """ + return response + + def pre_update_backup_vault( + self, + request: gcn_backup_vault.UpdateBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcn_backup_vault.UpdateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetApp server. + """ + return request, metadata + + def post_update_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the NetApp server but before + it is returned to user code. 
+ """ + return response + def pre_update_kms_config( self, request: kms.UpdateKmsConfigRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[kms.UpdateKmsConfigRequest, Sequence[Tuple[str, str]]]: @@ -1570,12 +2033,12 @@ def __call__( resp = self._interceptor.post_create_active_directory(resp) return resp - class _CreateKmsConfig(NetAppRestStub): + class _CreateBackup(NetAppRestStub): def __hash__(self): - return hash("CreateKmsConfig") + return hash("CreateBackup") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "kmsConfigId": "", + "backupId": "", } @classmethod @@ -1588,18 +2051,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: kms.CreateKmsConfigRequest, + request: gcn_backup.CreateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create kms config method over HTTP. + r"""Call the create backup method over HTTP. Args: - request (~.kms.CreateKmsConfigRequest): - The request object. CreateKmsConfigRequest creates a KMS - Config. + request (~.gcn_backup.CreateBackupRequest): + The request object. CreateBackupRequest creates a backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1617,14 +2079,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/kmsConfigs", - "body": "kms_config", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*}/backups", + "body": "backup", }, ] - request, metadata = self._interceptor.pre_create_kms_config( - request, metadata - ) - pb_request = kms.CreateKmsConfigRequest.pb(request) + request, metadata = self._interceptor.pre_create_backup(request, metadata) + pb_request = gcn_backup.CreateBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1668,15 +2128,15 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_kms_config(resp) + resp = self._interceptor.post_create_backup(resp) return resp - class _CreateReplication(NetAppRestStub): + class _CreateBackupPolicy(NetAppRestStub): def __hash__(self): - return hash("CreateReplication") + return hash("CreateBackupPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "replicationId": "", + "backupPolicyId": "", } @classmethod @@ -1689,18 +2149,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcn_replication.CreateReplicationRequest, + request: gcn_backup_policy.CreateBackupPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create replication method over HTTP. + r"""Call the create backup policy method over HTTP. Args: - request (~.gcn_replication.CreateReplicationRequest): - The request object. CreateReplicationRequest creates a - replication. + request (~.gcn_backup_policy.CreateBackupPolicyRequest): + The request object. CreateBackupPolicyRequest creates a + backupPolicy. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1718,14 +2178,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/replications", - "body": "replication", + "uri": "/v1/{parent=projects/*/locations/*}/backupPolicies", + "body": "backup_policy", }, ] - request, metadata = self._interceptor.pre_create_replication( + request, metadata = self._interceptor.pre_create_backup_policy( request, metadata ) - pb_request = gcn_replication.CreateReplicationRequest.pb(request) + pb_request = gcn_backup_policy.CreateBackupPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1769,15 +2229,15 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_replication(resp) + resp = self._interceptor.post_create_backup_policy(resp) return resp - class _CreateSnapshot(NetAppRestStub): + class _CreateBackupVault(NetAppRestStub): def __hash__(self): - return hash("CreateSnapshot") + return hash("CreateBackupVault") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "snapshotId": "", + "backupVaultId": "", } @classmethod @@ -1790,18 +2250,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcn_snapshot.CreateSnapshotRequest, + request: gcn_backup_vault.CreateBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create snapshot method over HTTP. + r"""Call the create backup vault method over HTTP. Args: - request (~.gcn_snapshot.CreateSnapshotRequest): - The request object. CreateSnapshotRequest creates a - snapshot. 
+ request (~.gcn_backup_vault.CreateBackupVaultRequest): + The request object. CreateBackupVaultRequest creates a + backup vault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1819,12 +2279,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/snapshots", - "body": "snapshot", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + "body": "backup_vault", }, ] - request, metadata = self._interceptor.pre_create_snapshot(request, metadata) - pb_request = gcn_snapshot.CreateSnapshotRequest.pb(request) + request, metadata = self._interceptor.pre_create_backup_vault( + request, metadata + ) + pb_request = gcn_backup_vault.CreateBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1868,15 +2330,15 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_snapshot(resp) + resp = self._interceptor.post_create_backup_vault(resp) return resp - class _CreateStoragePool(NetAppRestStub): + class _CreateKmsConfig(NetAppRestStub): def __hash__(self): - return hash("CreateStoragePool") + return hash("CreateKmsConfig") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "storagePoolId": "", + "kmsConfigId": "", } @classmethod @@ -1889,18 +2351,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcn_storage_pool.CreateStoragePoolRequest, + request: kms.CreateKmsConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create storage pool method over HTTP. + r"""Call the create kms config method over HTTP. 
Args: - request (~.gcn_storage_pool.CreateStoragePoolRequest): - The request object. CreateStoragePoolRequest creates a - Storage Pool. + request (~.kms.CreateKmsConfigRequest): + The request object. CreateKmsConfigRequest creates a KMS + Config. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1918,14 +2380,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/storagePools", - "body": "storage_pool", + "uri": "/v1/{parent=projects/*/locations/*}/kmsConfigs", + "body": "kms_config", }, ] - request, metadata = self._interceptor.pre_create_storage_pool( + request, metadata = self._interceptor.pre_create_kms_config( request, metadata ) - pb_request = gcn_storage_pool.CreateStoragePoolRequest.pb(request) + pb_request = kms.CreateKmsConfigRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1969,15 +2431,15 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_storage_pool(resp) + resp = self._interceptor.post_create_kms_config(resp) return resp - class _CreateVolume(NetAppRestStub): + class _CreateReplication(NetAppRestStub): def __hash__(self): - return hash("CreateVolume") + return hash("CreateReplication") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "volumeId": "", + "replicationId": "", } @classmethod @@ -1990,17 +2452,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcn_volume.CreateVolumeRequest, + request: gcn_replication.CreateReplicationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create volume method over HTTP. 
+ r"""Call the create replication method over HTTP. Args: - request (~.gcn_volume.CreateVolumeRequest): - The request object. Message for creating a Volume + request (~.gcn_replication.CreateReplicationRequest): + The request object. CreateReplicationRequest creates a + replication. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2018,12 +2481,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/volumes", - "body": "volume", + "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/replications", + "body": "replication", }, ] - request, metadata = self._interceptor.pre_create_volume(request, metadata) - pb_request = gcn_volume.CreateVolumeRequest.pb(request) + request, metadata = self._interceptor.pre_create_replication( + request, metadata + ) + pb_request = gcn_replication.CreateReplicationRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -2067,14 +2532,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_volume(resp) + resp = self._interceptor.post_create_replication(resp) return resp - class _DeleteActiveDirectory(NetAppRestStub): + class _CreateSnapshot(NetAppRestStub): def __hash__(self): - return hash("DeleteActiveDirectory") + return hash("CreateSnapshot") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "snapshotId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2086,18 +2553,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: active_directory.DeleteActiveDirectoryRequest, + request: gcn_snapshot.CreateSnapshotRequest, *, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete active directory method over HTTP. + r"""Call the create snapshot method over HTTP. Args: - request (~.active_directory.DeleteActiveDirectoryRequest): - The request object. DeleteActiveDirectoryRequest for - deleting a single active directory. + request (~.gcn_snapshot.CreateSnapshotRequest): + The request object. CreateSnapshotRequest creates a + snapshot. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2114,16 +2581,22 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/activeDirectories/*}", + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/snapshots", + "body": "snapshot", }, ] - request, metadata = self._interceptor.pre_delete_active_directory( - request, metadata - ) - pb_request = active_directory.DeleteActiveDirectoryRequest.pb(request) + request, metadata = self._interceptor.pre_create_snapshot(request, metadata) + pb_request = gcn_snapshot.CreateSnapshotRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2147,6 +2620,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2157,14 +2631,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = 
self._interceptor.post_delete_active_directory(resp) + resp = self._interceptor.post_create_snapshot(resp) return resp - class _DeleteKmsConfig(NetAppRestStub): + class _CreateStoragePool(NetAppRestStub): def __hash__(self): - return hash("DeleteKmsConfig") + return hash("CreateStoragePool") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "storagePoolId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2176,18 +2652,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: kms.DeleteKmsConfigRequest, + request: gcn_storage_pool.CreateStoragePoolRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete kms config method over HTTP. + r"""Call the create storage pool method over HTTP. Args: - request (~.kms.DeleteKmsConfigRequest): - The request object. DeleteKmsConfigRequest deletes a KMS - Config. + request (~.gcn_storage_pool.CreateStoragePoolRequest): + The request object. CreateStoragePoolRequest creates a + Storage Pool. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2204,16 +2680,24 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/kmsConfigs/*}", + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/storagePools", + "body": "storage_pool", }, ] - request, metadata = self._interceptor.pre_delete_kms_config( + request, metadata = self._interceptor.pre_create_storage_pool( request, metadata ) - pb_request = kms.DeleteKmsConfigRequest.pb(request) + pb_request = gcn_storage_pool.CreateStoragePoolRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2237,6 +2721,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2247,14 +2732,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_kms_config(resp) + resp = self._interceptor.post_create_storage_pool(resp) return resp - class _DeleteReplication(NetAppRestStub): + class _CreateVolume(NetAppRestStub): def __hash__(self): - return hash("DeleteReplication") + return hash("CreateVolume") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "volumeId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2266,18 +2753,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: replication.DeleteReplicationRequest, + request: gcn_volume.CreateVolumeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete replication method over HTTP. + r"""Call the create volume method over HTTP. Args: - request (~.replication.DeleteReplicationRequest): - The request object. DeleteReplicationRequest deletes a - replication. + request (~.gcn_volume.CreateVolumeRequest): + The request object. Message for creating a Volume retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2294,16 +2780,22 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}", + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/volumes", + "body": "volume", }, ] - request, metadata = self._interceptor.pre_delete_replication( - request, metadata - ) - pb_request = replication.DeleteReplicationRequest.pb(request) + request, metadata = self._interceptor.pre_create_volume(request, metadata) + pb_request = gcn_volume.CreateVolumeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2327,6 +2819,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2337,12 +2830,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_replication(resp) + resp = self._interceptor.post_create_volume(resp) return resp - class 
_DeleteSnapshot(NetAppRestStub): + class _DeleteActiveDirectory(NetAppRestStub): def __hash__(self): - return hash("DeleteSnapshot") + return hash("DeleteActiveDirectory") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2356,18 +2849,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: snapshot.DeleteSnapshotRequest, + request: active_directory.DeleteActiveDirectoryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete snapshot method over HTTP. + r"""Call the delete active directory method over HTTP. Args: - request (~.snapshot.DeleteSnapshotRequest): - The request object. DeleteSnapshotRequest deletes a - snapshot. + request (~.active_directory.DeleteActiveDirectoryRequest): + The request object. DeleteActiveDirectoryRequest for + deleting a single active directory. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2385,11 +2878,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}", + "uri": "/v1/{name=projects/*/locations/*/activeDirectories/*}", }, ] - request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) - pb_request = snapshot.DeleteSnapshotRequest.pb(request) + request, metadata = self._interceptor.pre_delete_active_directory( + request, metadata + ) + pb_request = active_directory.DeleteActiveDirectoryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2425,12 +2920,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_snapshot(resp) + resp = self._interceptor.post_delete_active_directory(resp) return resp - class _DeleteStoragePool(NetAppRestStub): + class _DeleteBackup(NetAppRestStub): def __hash__(self): - return hash("DeleteStoragePool") + return hash("DeleteBackup") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2444,18 +2939,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: storage_pool.DeleteStoragePoolRequest, + request: backup.DeleteBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete storage pool method over HTTP. + r"""Call the delete backup method over HTTP. Args: - request (~.storage_pool.DeleteStoragePoolRequest): - The request object. DeleteStoragePoolRequest deletes a - Storage Pool. + request (~.backup.DeleteBackupRequest): + The request object. DeleteBackupRequest deletes a backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2473,13 +2967,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/storagePools/*}", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}", }, ] - request, metadata = self._interceptor.pre_delete_storage_pool( - request, metadata - ) - pb_request = storage_pool.DeleteStoragePoolRequest.pb(request) + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = backup.DeleteBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2515,12 +3007,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_storage_pool(resp) + resp = self._interceptor.post_delete_backup(resp) return resp - class _DeleteVolume(NetAppRestStub): + class _DeleteBackupPolicy(NetAppRestStub): def __hash__(self): - return hash("DeleteVolume") + return hash("DeleteBackupPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2534,17 +3026,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: volume.DeleteVolumeRequest, + request: backup_policy.DeleteBackupPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete volume method over HTTP. + r"""Call the delete backup policy method over HTTP. Args: - request (~.volume.DeleteVolumeRequest): - The request object. Message for deleting a Volume + request (~.backup_policy.DeleteBackupPolicyRequest): + The request object. DeleteBackupPolicyRequest deletes a + backup policy. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2562,11 +3055,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/volumes/*}", + "uri": "/v1/{name=projects/*/locations/*/backupPolicies/*}", }, ] - request, metadata = self._interceptor.pre_delete_volume(request, metadata) - pb_request = volume.DeleteVolumeRequest.pb(request) + request, metadata = self._interceptor.pre_delete_backup_policy( + request, metadata + ) + pb_request = backup_policy.DeleteBackupPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2602,12 +3097,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_volume(resp) + resp = self._interceptor.post_delete_backup_policy(resp) return resp - class _EncryptVolumes(NetAppRestStub): + class _DeleteBackupVault(NetAppRestStub): def __hash__(self): - return hash("EncryptVolumes") + return hash("DeleteBackupVault") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2621,18 +3116,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: kms.EncryptVolumesRequest, + request: backup_vault.DeleteBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the encrypt volumes method over HTTP. + r"""Call the delete backup vault method over HTTP. Args: - request (~.kms.EncryptVolumesRequest): - The request object. EncryptVolumesRequest specifies the - KMS config to encrypt existing volumes. + request (~.backup_vault.DeleteBackupVaultRequest): + The request object. DeleteBackupVaultRequest deletes a + backupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2649,22 +3144,16 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/kmsConfigs/*}:encrypt", - "body": "*", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", }, ] - request, metadata = self._interceptor.pre_encrypt_volumes(request, metadata) - pb_request = kms.EncryptVolumesRequest.pb(request) + request, metadata = self._interceptor.pre_delete_backup_vault( + request, metadata + ) + pb_request = backup_vault.DeleteBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2688,7 +3177,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2699,12 +3187,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_encrypt_volumes(resp) + resp = self._interceptor.post_delete_backup_vault(resp) return resp - class _GetActiveDirectory(NetAppRestStub): + class _DeleteKmsConfig(NetAppRestStub): def __hash__(self): - return hash("GetActiveDirectory") + return hash("DeleteKmsConfig") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2718,18 +3206,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: active_directory.GetActiveDirectoryRequest, + request: kms.DeleteKmsConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> active_directory.ActiveDirectory: - r"""Call the 
get active directory method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete kms config method over HTTP. Args: - request (~.active_directory.GetActiveDirectoryRequest): - The request object. GetActiveDirectory for getting a - single active directory. + request (~.kms.DeleteKmsConfigRequest): + The request object. DeleteKmsConfigRequest deletes a KMS + Config. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2737,23 +3225,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.active_directory.ActiveDirectory: - ActiveDirectory is the public - representation of the active directory - config. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/activeDirectories/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/kmsConfigs/*}", }, ] - request, metadata = self._interceptor.pre_get_active_directory( + request, metadata = self._interceptor.pre_delete_kms_config( request, metadata ) - pb_request = active_directory.GetActiveDirectoryRequest.pb(request) + pb_request = kms.DeleteKmsConfigRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2787,16 +3275,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = active_directory.ActiveDirectory() - pb_resp = active_directory.ActiveDirectory.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_active_directory(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_kms_config(resp) return resp - class 
_GetKmsConfig(NetAppRestStub): + class _DeleteReplication(NetAppRestStub): def __hash__(self): - return hash("GetKmsConfig") + return hash("DeleteReplication") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2810,18 +3296,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: kms.GetKmsConfigRequest, + request: replication.DeleteReplicationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> kms.KmsConfig: - r"""Call the get kms config method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete replication method over HTTP. Args: - request (~.kms.GetKmsConfigRequest): - The request object. GetKmsConfigRequest gets a KMS - Config. + request (~.replication.DeleteReplicationRequest): + The request object. DeleteReplicationRequest deletes a + replication. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2829,20 +3315,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.kms.KmsConfig: - KmsConfig is the customer managed - encryption key(CMEK) configuration. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/kmsConfigs/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}", }, ] - request, metadata = self._interceptor.pre_get_kms_config(request, metadata) - pb_request = kms.GetKmsConfigRequest.pb(request) + request, metadata = self._interceptor.pre_delete_replication( + request, metadata + ) + pb_request = replication.DeleteReplicationRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2876,16 +3365,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = kms.KmsConfig() - pb_resp = kms.KmsConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_kms_config(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_replication(resp) return resp - class _GetReplication(NetAppRestStub): + class _DeleteSnapshot(NetAppRestStub): def __hash__(self): - return hash("GetReplication") + return hash("DeleteSnapshot") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2899,18 +3386,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: replication.GetReplicationRequest, + request: snapshot.DeleteSnapshotRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> replication.Replication: - r"""Call the get replication method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete snapshot method over HTTP. Args: - request (~.replication.GetReplicationRequest): - The request object. GetReplicationRequest gets the state - of a replication. + request (~.snapshot.DeleteSnapshotRequest): + The request object. 
DeleteSnapshotRequest deletes a + snapshot. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2918,22 +3405,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.replication.Replication: - Replication is a nested resource - under Volume, that describes a - cross-region replication relationship - between 2 volumes in different regions. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}", }, ] - request, metadata = self._interceptor.pre_get_replication(request, metadata) - pb_request = replication.GetReplicationRequest.pb(request) + request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) + pb_request = snapshot.DeleteSnapshotRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2967,16 +3453,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = replication.Replication() - pb_resp = replication.Replication.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_replication(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_snapshot(resp) return resp - class _GetSnapshot(NetAppRestStub): + class _DeleteStoragePool(NetAppRestStub): def __hash__(self): - return hash("GetSnapshot") + return hash("DeleteStoragePool") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2990,18 +3474,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, 
- request: snapshot.GetSnapshotRequest, + request: storage_pool.DeleteStoragePoolRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshot.Snapshot: - r"""Call the get snapshot method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete storage pool method over HTTP. Args: - request (~.snapshot.GetSnapshotRequest): - The request object. GetSnapshotRequest gets the state of - a snapshot. + request (~.storage_pool.DeleteStoragePoolRequest): + The request object. DeleteStoragePoolRequest deletes a + Storage Pool. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3009,20 +3493,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.snapshot.Snapshot: - Snapshot is a point-in-time version - of a Volume's content. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/storagePools/*}", }, ] - request, metadata = self._interceptor.pre_get_snapshot(request, metadata) - pb_request = snapshot.GetSnapshotRequest.pb(request) + request, metadata = self._interceptor.pre_delete_storage_pool( + request, metadata + ) + pb_request = storage_pool.DeleteStoragePoolRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3056,16 +3543,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = snapshot.Snapshot() - pb_resp = snapshot.Snapshot.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_snapshot(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_storage_pool(resp) return resp - class _GetStoragePool(NetAppRestStub): + class _DeleteVolume(NetAppRestStub): def __hash__(self): - return hash("GetStoragePool") + return hash("DeleteVolume") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3079,18 +3564,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: storage_pool.GetStoragePoolRequest, + request: volume.DeleteVolumeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> storage_pool.StoragePool: - r"""Call the get storage pool method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the delete volume method over HTTP. Args: - request (~.storage_pool.GetStoragePoolRequest): - The request object. GetStoragePoolRequest gets a Storage - Pool. + request (~.volume.DeleteVolumeRequest): + The request object. 
Message for deleting a Volume retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3098,26 +3582,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.storage_pool.StoragePool: - StoragePool is a container for - volumes with a service level and - capacity. Volumes can be created in a - pool of sufficient available capacity. - StoragePool capacity is what you are - billed for. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/storagePools/*}", + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/volumes/*}", }, ] - request, metadata = self._interceptor.pre_get_storage_pool( - request, metadata - ) - pb_request = storage_pool.GetStoragePoolRequest.pb(request) + request, metadata = self._interceptor.pre_delete_volume(request, metadata) + pb_request = volume.DeleteVolumeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3151,16 +3630,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = storage_pool.StoragePool() - pb_resp = storage_pool.StoragePool.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_storage_pool(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_volume(resp) return resp - class _GetVolume(NetAppRestStub): + class _EncryptVolumes(NetAppRestStub): def __hash__(self): - return hash("GetVolume") + return hash("EncryptVolumes") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3174,17 +3651,18 @@ def _get_unset_required_fields(cls, message_dict): def 
__call__( self, - request: volume.GetVolumeRequest, + request: kms.EncryptVolumesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> volume.Volume: - r"""Call the get volume method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the encrypt volumes method over HTTP. Args: - request (~.volume.GetVolumeRequest): - The request object. Message for getting a Volume + request (~.kms.EncryptVolumesRequest): + The request object. EncryptVolumesRequest specifies the + KMS config to encrypt existing volumes. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3192,22 +3670,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.volume.Volume: - Volume provides a filesystem that you - can mount. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/volumes/*}", + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/kmsConfigs/*}:encrypt", + "body": "*", }, ] - request, metadata = self._interceptor.pre_get_volume(request, metadata) - pb_request = volume.GetVolumeRequest.pb(request) + request, metadata = self._interceptor.pre_encrypt_volumes(request, metadata) + pb_request = kms.EncryptVolumesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3231,6 +3718,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3239,16 +3727,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = volume.Volume() - pb_resp = volume.Volume.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_volume(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_encrypt_volumes(resp) return resp - class _ListActiveDirectories(NetAppRestStub): + class _GetActiveDirectory(NetAppRestStub): def __hash__(self): - return hash("ListActiveDirectories") + return hash("GetActiveDirectory") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3262,18 +3748,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: active_directory.ListActiveDirectoriesRequest, + request: active_directory.GetActiveDirectoryRequest, *, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> active_directory.ListActiveDirectoriesResponse: - r"""Call the list active directories method over HTTP. + ) -> active_directory.ActiveDirectory: + r"""Call the get active directory method over HTTP. Args: - request (~.active_directory.ListActiveDirectoriesRequest): - The request object. ListActiveDirectoriesRequest for - requesting multiple active directories. + request (~.active_directory.GetActiveDirectoryRequest): + The request object. GetActiveDirectory for getting a + single active directory. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3281,23 +3767,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.active_directory.ListActiveDirectoriesResponse: - ListActiveDirectoriesResponse - contains all the active directories - requested. + ~.active_directory.ActiveDirectory: + ActiveDirectory is the public + representation of the active directory + config. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/activeDirectories", + "uri": "/v1/{name=projects/*/locations/*/activeDirectories/*}", }, ] - request, metadata = self._interceptor.pre_list_active_directories( + request, metadata = self._interceptor.pre_get_active_directory( request, metadata ) - pb_request = active_directory.ListActiveDirectoriesRequest.pb(request) + pb_request = active_directory.GetActiveDirectoryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3331,16 +3817,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = active_directory.ListActiveDirectoriesResponse() - pb_resp = active_directory.ListActiveDirectoriesResponse.pb(resp) + resp = active_directory.ActiveDirectory() + pb_resp = active_directory.ActiveDirectory.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_active_directories(resp) + resp = self._interceptor.post_get_active_directory(resp) return resp - class _ListKmsConfigs(NetAppRestStub): + class _GetBackup(NetAppRestStub): def __hash__(self): - return hash("ListKmsConfigs") + return hash("GetBackup") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3354,18 +3840,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: kms.ListKmsConfigsRequest, + request: backup.GetBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> kms.ListKmsConfigsResponse: - r"""Call the list kms configs method over HTTP. + ) -> backup.Backup: + r"""Call the get backup method over HTTP. Args: - request (~.kms.ListKmsConfigsRequest): - The request object. ListKmsConfigsRequest lists KMS - Configs. + request (~.backup.GetBackupRequest): + The request object. 
GetBackupRequest gets the state of a + backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3373,22 +3859,18 @@ def __call__( sent along with the request as metadata. Returns: - ~.kms.ListKmsConfigsResponse: - ListKmsConfigsResponse is the - response to a ListKmsConfigsRequest. - + ~.backup.Backup: + A NetApp Backup. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/kmsConfigs", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}", }, ] - request, metadata = self._interceptor.pre_list_kms_configs( - request, metadata - ) - pb_request = kms.ListKmsConfigsRequest.pb(request) + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = backup.GetBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3422,16 +3904,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = kms.ListKmsConfigsResponse() - pb_resp = kms.ListKmsConfigsResponse.pb(resp) + resp = backup.Backup() + pb_resp = backup.Backup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_kms_configs(resp) + resp = self._interceptor.post_get_backup(resp) return resp - class _ListReplications(NetAppRestStub): + class _GetBackupPolicy(NetAppRestStub): def __hash__(self): - return hash("ListReplications") + return hash("GetBackupPolicy") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3445,17 +3927,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: replication.ListReplicationsRequest, + request: backup_policy.GetBackupPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> 
replication.ListReplicationsResponse: - r"""Call the list replications method over HTTP. + ) -> backup_policy.BackupPolicy: + r"""Call the get backup policy method over HTTP. Args: - request (~.replication.ListReplicationsRequest): - The request object. ListReplications lists replications. + request (~.backup_policy.GetBackupPolicyRequest): + The request object. GetBackupPolicyRequest gets the state + of a backupPolicy. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3463,22 +3946,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.replication.ListReplicationsResponse: - ListReplicationsResponse is the - result of ListReplicationsRequest. - + ~.backup_policy.BackupPolicy: + Backup Policy. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/replications", + "uri": "/v1/{name=projects/*/locations/*/backupPolicies/*}", }, ] - request, metadata = self._interceptor.pre_list_replications( + request, metadata = self._interceptor.pre_get_backup_policy( request, metadata ) - pb_request = replication.ListReplicationsRequest.pb(request) + pb_request = backup_policy.GetBackupPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3512,16 +3993,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = replication.ListReplicationsResponse() - pb_resp = replication.ListReplicationsResponse.pb(resp) + resp = backup_policy.BackupPolicy() + pb_resp = backup_policy.BackupPolicy.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_replications(resp) + resp = self._interceptor.post_get_backup_policy(resp) return resp - class _ListSnapshots(NetAppRestStub): + class _GetBackupVault(NetAppRestStub): def __hash__(self): - 
return hash("ListSnapshots") + return hash("GetBackupVault") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3535,17 +4016,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: snapshot.ListSnapshotsRequest, + request: backup_vault.GetBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshot.ListSnapshotsResponse: - r"""Call the list snapshots method over HTTP. + ) -> backup_vault.BackupVault: + r"""Call the get backup vault method over HTTP. Args: - request (~.snapshot.ListSnapshotsRequest): - The request object. ListSnapshotsRequest lists snapshots. + request (~.backup_vault.GetBackupVaultRequest): + The request object. GetBackupVaultRequest gets the state + of a backupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3553,20 +4035,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.snapshot.ListSnapshotsResponse: - ListSnapshotsResponse is the result - of ListSnapshotsRequest. - + ~.backup_vault.BackupVault: + A NetApp BackupVault. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/snapshots", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", }, ] - request, metadata = self._interceptor.pre_list_snapshots(request, metadata) - pb_request = snapshot.ListSnapshotsRequest.pb(request) + request, metadata = self._interceptor.pre_get_backup_vault( + request, metadata + ) + pb_request = backup_vault.GetBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3600,16 +4082,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = snapshot.ListSnapshotsResponse() - pb_resp = snapshot.ListSnapshotsResponse.pb(resp) + resp = backup_vault.BackupVault() + pb_resp = backup_vault.BackupVault.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_snapshots(resp) + resp = self._interceptor.post_get_backup_vault(resp) return resp - class _ListStoragePools(NetAppRestStub): + class _GetKmsConfig(NetAppRestStub): def __hash__(self): - return hash("ListStoragePools") + return hash("GetKmsConfig") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3623,18 +4105,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: storage_pool.ListStoragePoolsRequest, + request: kms.GetKmsConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> storage_pool.ListStoragePoolsResponse: - r"""Call the list storage pools method over HTTP. + ) -> kms.KmsConfig: + r"""Call the get kms config method over HTTP. Args: - request (~.storage_pool.ListStoragePoolsRequest): - The request object. ListStoragePoolsRequest lists Storage - Pools. + request (~.kms.GetKmsConfigRequest): + The request object. GetKmsConfigRequest gets a KMS + Config. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3642,22 +4124,111 @@ def __call__( sent along with the request as metadata. Returns: - ~.storage_pool.ListStoragePoolsResponse: - ListStoragePoolsResponse is the - response to a ListStoragePoolsRequest. + ~.kms.KmsConfig: + KmsConfig is the customer managed + encryption key(CMEK) configuration. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/storagePools", + "uri": "/v1/{name=projects/*/locations/*/kmsConfigs/*}", }, ] - request, metadata = self._interceptor.pre_list_storage_pools( - request, metadata + request, metadata = self._interceptor.pre_get_kms_config(request, metadata) + pb_request = kms.GetKmsConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) ) - pb_request = storage_pool.ListStoragePoolsRequest.pb(request) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = kms.KmsConfig() + pb_resp = kms.KmsConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_kms_config(resp) + return resp + + class _GetReplication(NetAppRestStub): + def __hash__(self): + return hash("GetReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: replication.GetReplicationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> replication.Replication: + r"""Call the get replication method over HTTP. + + Args: + request (~.replication.GetReplicationRequest): + The request object. GetReplicationRequest gets the state + of a replication. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.replication.Replication: + Replication is a nested resource + under Volume, that describes a + cross-region replication relationship + between 2 volumes in different regions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}", + }, + ] + request, metadata = self._interceptor.pre_get_replication(request, metadata) + pb_request = replication.GetReplicationRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3691,16 +4262,1300 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = storage_pool.ListStoragePoolsResponse() - pb_resp = storage_pool.ListStoragePoolsResponse.pb(resp) + resp = replication.Replication() + pb_resp = replication.Replication.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_storage_pools(resp) + resp = self._interceptor.post_get_replication(resp) return resp - class _ListVolumes(NetAppRestStub): + class _GetSnapshot(NetAppRestStub): def __hash__(self): - return hash("ListVolumes") + return hash("GetSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: snapshot.GetSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshot.Snapshot: + r"""Call the get snapshot method over HTTP. + + Args: + request (~.snapshot.GetSnapshotRequest): + The request object. GetSnapshotRequest gets the state of + a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.snapshot.Snapshot: + Snapshot is a point-in-time version + of a Volume's content. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}", + }, + ] + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) + pb_request = snapshot.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshot.Snapshot() + pb_resp = snapshot.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_snapshot(resp) + return resp + + class _GetStoragePool(NetAppRestStub): + def __hash__(self): + return hash("GetStoragePool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storage_pool.GetStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storage_pool.StoragePool: + r"""Call the get storage pool method over HTTP. + + Args: + request (~.storage_pool.GetStoragePoolRequest): + The request object. GetStoragePoolRequest gets a Storage + Pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.storage_pool.StoragePool: + StoragePool is a container for + volumes with a service level and + capacity. Volumes can be created in a + pool of sufficient available capacity. + StoragePool capacity is what you are + billed for. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/storagePools/*}", + }, + ] + request, metadata = self._interceptor.pre_get_storage_pool( + request, metadata + ) + pb_request = storage_pool.GetStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storage_pool.StoragePool() + pb_resp = storage_pool.StoragePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_storage_pool(resp) + return resp + + class _GetVolume(NetAppRestStub): + def __hash__(self): + return hash("GetVolume") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: volume.GetVolumeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.Volume: + r"""Call the get volume method over HTTP. + + Args: + request (~.volume.GetVolumeRequest): + The request object. Message for getting a Volume + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.volume.Volume: + Volume provides a filesystem that you + can mount. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/volumes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_volume(request, metadata) + pb_request = volume.GetVolumeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = volume.Volume() + pb_resp = volume.Volume.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_volume(resp) + return resp + + class _ListActiveDirectories(NetAppRestStub): + def __hash__(self): + return hash("ListActiveDirectories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: active_directory.ListActiveDirectoriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> active_directory.ListActiveDirectoriesResponse: + r"""Call the list active directories method over HTTP. + + Args: + request (~.active_directory.ListActiveDirectoriesRequest): + The request object. ListActiveDirectoriesRequest for + requesting multiple active directories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.active_directory.ListActiveDirectoriesResponse: + ListActiveDirectoriesResponse + contains all the active directories + requested. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/activeDirectories", + }, + ] + request, metadata = self._interceptor.pre_list_active_directories( + request, metadata + ) + pb_request = active_directory.ListActiveDirectoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = active_directory.ListActiveDirectoriesResponse() + pb_resp = active_directory.ListActiveDirectoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_active_directories(resp) + return resp + + class _ListBackupPolicies(NetAppRestStub): + def __hash__(self): + return hash("ListBackupPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backup_policy.ListBackupPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_policy.ListBackupPoliciesResponse: + r"""Call the list backup policies method over HTTP. + + Args: + request (~.backup_policy.ListBackupPoliciesRequest): + The request object. ListBackupPoliciesRequest for + requesting multiple backup policies. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backup_policy.ListBackupPoliciesResponse: + ListBackupPoliciesResponse contains + all the backup policies requested. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_backup_policies( + request, metadata + ) + pb_request = backup_policy.ListBackupPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup_policy.ListBackupPoliciesResponse() + pb_resp = backup_policy.ListBackupPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_policies(resp) + return resp + + class _ListBackups(NetAppRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backup.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backup.ListBackupsRequest): + The request object. ListBackupsRequest lists backups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backup.ListBackupsResponse: + ListBackupsResponse is the result of + ListBackupsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = backup.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup.ListBackupsResponse() + pb_resp = backup.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListBackupVaults(NetAppRestStub): + def __hash__(self): + return hash("ListBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backup_vault.ListBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_vault.ListBackupVaultsResponse: + r"""Call the list backup vaults method over HTTP. + + Args: + request (~.backup_vault.ListBackupVaultsRequest): + The request object. ListBackupVaultsRequest lists + backupVaults. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backup_vault.ListBackupVaultsResponse: + ListBackupVaultsResponse is the + result of ListBackupVaultsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + }, + ] + request, metadata = self._interceptor.pre_list_backup_vaults( + request, metadata + ) + pb_request = backup_vault.ListBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup_vault.ListBackupVaultsResponse() + pb_resp = backup_vault.ListBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_vaults(resp) + return resp + + class _ListKmsConfigs(NetAppRestStub): + def __hash__(self): + return hash("ListKmsConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: kms.ListKmsConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> kms.ListKmsConfigsResponse: + r"""Call the list kms configs method over HTTP. + + Args: + request (~.kms.ListKmsConfigsRequest): + The request object. ListKmsConfigsRequest lists KMS + Configs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.kms.ListKmsConfigsResponse: + ListKmsConfigsResponse is the + response to a ListKmsConfigsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/kmsConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_kms_configs( + request, metadata + ) + pb_request = kms.ListKmsConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = kms.ListKmsConfigsResponse() + pb_resp = kms.ListKmsConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_kms_configs(resp) + return resp + + class _ListReplications(NetAppRestStub): + def __hash__(self): + return hash("ListReplications") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: replication.ListReplicationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> replication.ListReplicationsResponse: + r"""Call the list replications method over HTTP. + + Args: + request (~.replication.ListReplicationsRequest): + The request object. ListReplications lists replications. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.replication.ListReplicationsResponse: + ListReplicationsResponse is the + result of ListReplicationsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/replications", + }, + ] + request, metadata = self._interceptor.pre_list_replications( + request, metadata + ) + pb_request = replication.ListReplicationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = replication.ListReplicationsResponse() + pb_resp = replication.ListReplicationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_replications(resp) + return resp + + class _ListSnapshots(NetAppRestStub): + def __hash__(self): + return hash("ListSnapshots") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: snapshot.ListSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshot.ListSnapshotsResponse: + r"""Call the list snapshots method over HTTP. + + Args: + request (~.snapshot.ListSnapshotsRequest): + The request object. ListSnapshotsRequest lists snapshots. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshot.ListSnapshotsResponse: + ListSnapshotsResponse is the result + of ListSnapshotsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/volumes/*}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) + pb_request = snapshot.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshot.ListSnapshotsResponse() + pb_resp = snapshot.ListSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_snapshots(resp) + return resp + + class _ListStoragePools(NetAppRestStub): + def __hash__(self): + return hash("ListStoragePools") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: storage_pool.ListStoragePoolsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> storage_pool.ListStoragePoolsResponse: + r"""Call the list storage pools method over HTTP. + + Args: + request (~.storage_pool.ListStoragePoolsRequest): + The request object. ListStoragePoolsRequest lists Storage + Pools. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.storage_pool.ListStoragePoolsResponse: + ListStoragePoolsResponse is the + response to a ListStoragePoolsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/storagePools", + }, + ] + request, metadata = self._interceptor.pre_list_storage_pools( + request, metadata + ) + pb_request = storage_pool.ListStoragePoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = storage_pool.ListStoragePoolsResponse() + pb_resp = storage_pool.ListStoragePoolsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_storage_pools(resp) + return resp + + class _ListVolumes(NetAppRestStub): + def __hash__(self): + return hash("ListVolumes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: volume.ListVolumesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.ListVolumesResponse: + r"""Call the list volumes method over HTTP. + + Args: + request (~.volume.ListVolumesRequest): + The request object. Message for requesting list of + Volumes + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.volume.ListVolumesResponse: + Message for response to listing + Volumes + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/volumes", + }, + ] + request, metadata = self._interceptor.pre_list_volumes(request, metadata) + pb_request = volume.ListVolumesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = volume.ListVolumesResponse() + pb_resp = volume.ListVolumesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_volumes(resp) + return resp + + class _ResumeReplication(NetAppRestStub): + def __hash__(self): + return hash("ResumeReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: replication.ResumeReplicationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the resume replication method over HTTP. + + Args: + request (~.replication.ResumeReplicationRequest): + The request object. ResumeReplicationRequest resumes a + stopped replication. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_replication( + request, metadata + ) + pb_request = replication.ResumeReplicationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_replication(resp) + return resp + + class _ReverseReplicationDirection(NetAppRestStub): + def __hash__(self): + return hash("ReverseReplicationDirection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: replication.ReverseReplicationDirectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the reverse replication + direction method over HTTP. + + Args: + request (~.replication.ReverseReplicationDirectionRequest): + The request object. ReverseReplicationDirectionRequest + reverses direction of replication. + Source becomes destination and + destination becomes source. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}:reverseDirection", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reverse_replication_direction( + request, metadata + ) + pb_request = replication.ReverseReplicationDirectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reverse_replication_direction(resp) + return resp + + class _RevertVolume(NetAppRestStub): + def __hash__(self): + return hash("RevertVolume") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3714,18 +5569,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: volume.ListVolumesRequest, + request: volume.RevertVolumeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> volume.ListVolumesResponse: - r"""Call the list volumes method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the revert volume method over HTTP. Args: - request (~.volume.ListVolumesRequest): - The request object. Message for requesting list of - Volumes + request (~.volume.RevertVolumeRequest): + The request object. RevertVolumeRequest reverts the given + volume to the specified snapshot. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3733,22 +5588,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.volume.ListVolumesResponse: - Message for response to listing - Volumes + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/volumes", + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/volumes/*}:revert", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_volumes(request, metadata) - pb_request = volume.ListVolumesRequest.pb(request) + request, metadata = self._interceptor.pre_revert_volume(request, metadata) + pb_request = volume.RevertVolumeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3772,6 +5636,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3780,16 +5645,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = volume.ListVolumesResponse() - pb_resp = volume.ListVolumesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_volumes(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_revert_volume(resp) return resp - class _ResumeReplication(NetAppRestStub): + class _StopReplication(NetAppRestStub): def __hash__(self): - return hash("ResumeReplication") + return hash("StopReplication") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3803,18 +5666,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: replication.ResumeReplicationRequest, + request: replication.StopReplicationRequest, *, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the resume replication method over HTTP. + r"""Call the stop replication method over HTTP. Args: - request (~.replication.ResumeReplicationRequest): - The request object. ResumeReplicationRequest resumes a - stopped replication. + request (~.replication.StopReplicationRequest): + The request object. StopReplicationRequest stops a + replication until resumed. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3832,14 +5695,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}:resume", + "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}:stop", "body": "*", }, ] - request, metadata = self._interceptor.pre_resume_replication( + request, metadata = self._interceptor.pre_stop_replication( request, metadata ) - pb_request = replication.ResumeReplicationRequest.pb(request) + pb_request = replication.StopReplicationRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -3883,14 +5746,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_resume_replication(resp) + resp = self._interceptor.post_stop_replication(resp) return resp - class _ReverseReplicationDirection(NetAppRestStub): + class _UpdateActiveDirectory(NetAppRestStub): def __hash__(self): - return hash("ReverseReplicationDirection") + return hash("UpdateActiveDirectory") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -3902,46 +5767,43 @@ 
def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: replication.ReverseReplicationDirectionRequest, + request: gcn_active_directory.UpdateActiveDirectoryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the reverse replication - direction method over HTTP. + r"""Call the update active directory method over HTTP. - Args: - request (~.replication.ReverseReplicationDirectionRequest): - The request object. ReverseReplicationDirectionRequest - reverses direction of replication. - Source becomes destination and - destination becomes source. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.gcn_active_directory.UpdateActiveDirectoryRequest): + The request object. UpdateActiveDirectoryRequest for + updating an active directory. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}:reverseDirection", - "body": "*", + "method": "patch", + "uri": "/v1/{active_directory.name=projects/*/locations/*/activeDirectories/*}", + "body": "active_directory", }, ] - request, metadata = self._interceptor.pre_reverse_replication_direction( + request, metadata = self._interceptor.pre_update_active_directory( request, metadata ) - pb_request = replication.ReverseReplicationDirectionRequest.pb(request) + pb_request = gcn_active_directory.UpdateActiveDirectoryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -3985,14 +5847,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reverse_replication_direction(resp) + resp = self._interceptor.post_update_active_directory(resp) return resp - class _RevertVolume(NetAppRestStub): + class _UpdateBackup(NetAppRestStub): def __hash__(self): - return hash("RevertVolume") + return hash("UpdateBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4004,18 +5868,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: volume.RevertVolumeRequest, + request: gcn_backup.UpdateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the revert volume method over HTTP. + r"""Call the update backup method over HTTP. Args: - request (~.volume.RevertVolumeRequest): - The request object. RevertVolumeRequest reverts the given - volume to the specified snapshot. 
+ request (~.gcn_backup.UpdateBackupRequest): + The request object. UpdateBackupRequest updates + description and/or labels for a backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4032,13 +5896,13 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/volumes/*}:revert", - "body": "*", + "method": "patch", + "uri": "/v1/{backup.name=projects/*/locations/*/backupVaults/*/backups/*}", + "body": "backup", }, ] - request, metadata = self._interceptor.pre_revert_volume(request, metadata) - pb_request = volume.RevertVolumeRequest.pb(request) + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = gcn_backup.UpdateBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4082,14 +5946,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_revert_volume(resp) + resp = self._interceptor.post_update_backup(resp) return resp - class _StopReplication(NetAppRestStub): + class _UpdateBackupPolicy(NetAppRestStub): def __hash__(self): - return hash("StopReplication") + return hash("UpdateBackupPolicy") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4101,18 +5967,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: replication.StopReplicationRequest, + request: gcn_backup_policy.UpdateBackupPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the stop replication method over 
HTTP. + r"""Call the update backup policy method over HTTP. Args: - request (~.replication.StopReplicationRequest): - The request object. StopReplicationRequest stops a - replication until resumed. + request (~.gcn_backup_policy.UpdateBackupPolicyRequest): + The request object. UpdateBackupPolicyRequest for + updating a backup policy. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4129,15 +5995,15 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/volumes/*/replications/*}:stop", - "body": "*", + "method": "patch", + "uri": "/v1/{backup_policy.name=projects/*/locations/*/backupPolicies/*}", + "body": "backup_policy", }, ] - request, metadata = self._interceptor.pre_stop_replication( + request, metadata = self._interceptor.pre_update_backup_policy( request, metadata ) - pb_request = replication.StopReplicationRequest.pb(request) + pb_request = gcn_backup_policy.UpdateBackupPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4181,12 +6047,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_stop_replication(resp) + resp = self._interceptor.post_update_backup_policy(resp) return resp - class _UpdateActiveDirectory(NetAppRestStub): + class _UpdateBackupVault(NetAppRestStub): def __hash__(self): - return hash("UpdateActiveDirectory") + return hash("UpdateBackupVault") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, @@ -4202,18 +6068,19 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcn_active_directory.UpdateActiveDirectoryRequest, + request: gcn_backup_vault.UpdateBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the update active directory method over HTTP. + r"""Call the update backup vault method over HTTP. Args: - request (~.gcn_active_directory.UpdateActiveDirectoryRequest): - The request object. UpdateActiveDirectoryRequest for - updating an active directory. + request (~.gcn_backup_vault.UpdateBackupVaultRequest): + The request object. UpdateBackupVaultRequest updates + description and/or labels for a + backupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4231,14 +6098,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "patch", - "uri": "/v1/{active_directory.name=projects/*/locations/*/activeDirectories/*}", - "body": "active_directory", + "uri": "/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}", + "body": "backup_vault", }, ] - request, metadata = self._interceptor.pre_update_active_directory( + request, metadata = self._interceptor.pre_update_backup_vault( request, metadata ) - pb_request = gcn_active_directory.UpdateActiveDirectoryRequest.pb(request) + pb_request = gcn_backup_vault.UpdateBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4282,7 +6149,7 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_active_directory(resp) + resp = self._interceptor.post_update_backup_vault(resp) return resp class _UpdateKmsConfig(NetAppRestStub): @@ -4898,6 +6765,34 @@ def create_active_directory( # In C++ this would require a dynamic_cast return self._CreateActiveDirectory(self._session, self._host, self._interceptor) # type: ignore + @property + def create_backup( + self, + ) -> Callable[[gcn_backup.CreateBackupRequest], 
operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.CreateBackupPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.CreateBackupVaultRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def create_kms_config( self, @@ -4950,6 +6845,30 @@ def delete_active_directory( # In C++ this would require a dynamic_cast return self._DeleteActiveDirectory(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup( + self, + ) -> Callable[[backup.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_policy( + self, + ) -> Callable[[backup_policy.DeleteBackupPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_vault( + self, + ) -> Callable[[backup_vault.DeleteBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def delete_kms_config( self, @@ -5008,6 +6927,28 @@ def get_active_directory( # In C++ this would require a dynamic_cast return self._GetActiveDirectory(self._session, self._host, self._interceptor) # type: ignore + @property + def get_backup(self) -> Callable[[backup.GetBackupRequest], backup.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_policy( + self, + ) -> Callable[[backup_policy.GetBackupPolicyRequest], backup_policy.BackupPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_vault( + self, + ) -> Callable[[backup_vault.GetBackupVaultRequest], backup_vault.BackupVault]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def get_kms_config(self) -> Callable[[kms.GetKmsConfigRequest], kms.KmsConfig]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
@@ -5055,6 +6996,35 @@ def list_active_directories( # In C++ this would require a dynamic_cast return self._ListActiveDirectories(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_policies( + self, + ) -> Callable[ + [backup_policy.ListBackupPoliciesRequest], + backup_policy.ListBackupPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[backup.ListBackupsRequest], backup.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backup_vault.ListBackupVaultsRequest], backup_vault.ListBackupVaultsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + @property def list_kms_configs( self, @@ -5143,6 +7113,34 @@ def update_active_directory( # In C++ this would require a dynamic_cast return self._UpdateActiveDirectory(self._session, self._host, self._interceptor) # type: ignore + @property + def update_backup( + self, + ) -> Callable[[gcn_backup.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_policy( + self, + ) -> Callable[ + [gcn_backup_policy.UpdateBackupPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_vault( + self, + ) -> Callable[ + [gcn_backup_vault.UpdateBackupVaultRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def update_kms_config( self, diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py index 3146d9ebf6ba..36663628f37c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py @@ -22,6 +22,33 @@ ListActiveDirectoriesResponse, UpdateActiveDirectoryRequest, ) +from .backup import ( + Backup, + CreateBackupRequest, + DeleteBackupRequest, + GetBackupRequest, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from .backup_policy import ( + BackupPolicy, + CreateBackupPolicyRequest, + DeleteBackupPolicyRequest, + GetBackupPolicyRequest, + ListBackupPoliciesRequest, + ListBackupPoliciesResponse, + UpdateBackupPolicyRequest, +) +from .backup_vault import ( + BackupVault, + CreateBackupVaultRequest, + DeleteBackupVaultRequest, + GetBackupVaultRequest, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + UpdateBackupVaultRequest, +) from 
.cloud_netapp_service import OperationMetadata from .kms import ( CreateKmsConfigRequest, @@ -69,6 +96,7 @@ ) from .volume import ( AccessType, + BackupConfig, CreateVolumeRequest, DailySchedule, DeleteVolumeRequest, @@ -100,6 +128,27 @@ "ListActiveDirectoriesRequest", "ListActiveDirectoriesResponse", "UpdateActiveDirectoryRequest", + "Backup", + "CreateBackupRequest", + "DeleteBackupRequest", + "GetBackupRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "UpdateBackupRequest", + "BackupPolicy", + "CreateBackupPolicyRequest", + "DeleteBackupPolicyRequest", + "GetBackupPolicyRequest", + "ListBackupPoliciesRequest", + "ListBackupPoliciesResponse", + "UpdateBackupPolicyRequest", + "BackupVault", + "CreateBackupVaultRequest", + "DeleteBackupVaultRequest", + "GetBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "UpdateBackupVaultRequest", "OperationMetadata", "EncryptionType", "ServiceLevel", @@ -139,6 +188,7 @@ "ListStoragePoolsResponse", "StoragePool", "UpdateStoragePoolRequest", + "BackupConfig", "CreateVolumeRequest", "DailySchedule", "DeleteVolumeRequest", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/active_directory.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/active_directory.py index 877e161d2434..e574e89b5b9c 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/active_directory.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/active_directory.py @@ -204,7 +204,7 @@ class ActiveDirectory(proto.Message): Attributes: name (str): - Output only. The resource name of the active directory. + Identifier. The resource name of the active directory. Format: ``projects/{project_number}/locations/{location_id}/activeDirectories/{active_directory_id}``. 
create_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup.py new file mode 100644 index 000000000000..af11dbbb5e19 --- /dev/null +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup.py @@ -0,0 +1,359 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.netapp.v1", + manifest={ + "Backup", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "CreateBackupRequest", + "DeleteBackupRequest", + "UpdateBackupRequest", + }, +) + + +class Backup(proto.Message): + r"""A NetApp Backup. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the backup. Format: + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}``. + state (google.cloud.netapp_v1.types.Backup.State): + Output only. The backup state. + description (str): + A description of the backup with 2048 + characters or less. 
Requests with longer + descriptions will be rejected. + volume_usage_bytes (int): + Output only. Size of the file system when the + backup was created. When creating a new volume + from the backup, the volume capacity will have + to be at least as big. + backup_type (google.cloud.netapp_v1.types.Backup.Type): + Output only. Type of backup, manually created + or created by a backup policy. + source_volume (str): + Volume full name of this backup belongs to. Format: + ``projects/{projects_id}/locations/{location}/volumes/{volume_id}`` + source_snapshot (str): + If specified, backup will be created from the given + snapshot. If not specified, there will be a new snapshot + taken to initiate the backup creation. Format: + ``projects/{project_id}/locations/{location}/volumes/{volume_id}/snapshots/{snapshot_id}`` + + This field is a member of `oneof`_ ``_source_snapshot``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup was + created. + labels (MutableMapping[str, str]): + Resource labels to represent user provided + metadata. + chain_storage_bytes (int): + Output only. Total size of all backups in a + chain in bytes = baseline backup size + + sum(incremental backup size) + """ + + class State(proto.Enum): + r"""The Backup States + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + Backup is being created. While in this state, + the snapshot for the backup point-in-time may + not have been created yet, and so the + point-in-time may not have been fixed. + UPLOADING (2): + Backup is being uploaded. While in this + state, none of the writes to the volume will be + included in the backup. + READY (3): + Backup is available for use. + DELETING (4): + Backup is being deleted. + ERROR (5): + Backup is not valid and cannot be used for + creating new volumes or restoring existing + volumes. + UPDATING (6): + Backup is being updated. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + UPLOADING = 2 + READY = 3 + DELETING = 4 + ERROR = 5 + UPDATING = 6 + + class Type(proto.Enum): + r"""Backup types. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified backup type. + MANUAL (1): + Manual backup type. + SCHEDULED (2): + Scheduled backup type. + """ + TYPE_UNSPECIFIED = 0 + MANUAL = 1 + SCHEDULED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + volume_usage_bytes: int = proto.Field( + proto.INT64, + number=4, + ) + backup_type: Type = proto.Field( + proto.ENUM, + number=5, + enum=Type, + ) + source_volume: str = proto.Field( + proto.STRING, + number=6, + ) + source_snapshot: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + chain_storage_bytes: int = proto.Field( + proto.INT64, + number=10, + ) + + +class ListBackupsRequest(proto.Message): + r"""ListBackupsRequest lists backups. + + Attributes: + parent (str): + Required. The backupVault for which to retrieve backup + information, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}``. + To retrieve backup information for all locations, use "-" + for the ``{location}`` value. To retrieve backup information + for all backupVaults, use "-" for the ``{backup_vault_id}`` + value. To retrieve backup information for a volume, use "-" + for the ``{backup_vault_id}`` value and specify volume full + name with the filter. + page_size (int): + The maximum number of items to return. The + service may return fewer than this value. The + maximum value is 1000; values above 1000 will be + coerced to 1000. 
+ page_token (str): + The next_page_token value to use if there are additional + results to retrieve for this list request. + order_by (str): + Sort results. Supported values are "name", + "name desc" or "" (unsorted). + filter (str): + The standard list filter. + If specified, backups will be returned based on + the attribute name that matches the filter + expression. If empty, then no backups are + filtered out. See https://google.aip.dev/160 + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupsResponse(proto.Message): + r"""ListBackupsResponse is the result of ListBackupsRequest. + + Attributes: + backups (MutableSequence[google.cloud.netapp_v1.types.Backup]): + A list of backups in the project. + next_page_token (str): + The token you can use to retrieve the next + page of results. Not returned if there are no + more results in the list. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""GetBackupRequest gets the state of a backup. + + Attributes: + name (str): + Required. The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateBackupRequest(proto.Message): + r"""CreateBackupRequest creates a backup. 
+ + Attributes: + parent (str): + Required. The NetApp backupVault to create the backups of, + in the format + ``projects/*/locations/*/backupVaults/{backup_vault_id}`` + backup_id (str): + Required. The ID to use for the backup. The ID must be + unique within the specified backupVault. This value must + start with a lowercase letter followed by up to 62 lowercase + letters, numbers, or hyphens, and cannot end with a hyphen. + Values that do not match this pattern will trigger an + INVALID_ARGUMENT error. + backup (google.cloud.netapp_v1.types.Backup): + Required. A backup resource + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: "Backup" = proto.Field( + proto.MESSAGE, + number=3, + message="Backup", + ) + + +class DeleteBackupRequest(proto.Message): + r"""DeleteBackupRequest deletes a backup. + + Attributes: + name (str): + Required. The backup resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateBackupRequest(proto.Message): + r"""UpdateBackupRequest updates description and/or labels for a + backup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource to be updated. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then all + fields will be overwritten. + backup (google.cloud.netapp_v1.types.Backup): + Required. 
The backup being updated + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: "Backup" = proto.Field( + proto.MESSAGE, + number=2, + message="Backup", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_policy.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_policy.py new file mode 100644 index 000000000000..296a3f768c0b --- /dev/null +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_policy.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.netapp.v1", + manifest={ + "BackupPolicy", + "CreateBackupPolicyRequest", + "GetBackupPolicyRequest", + "ListBackupPoliciesRequest", + "ListBackupPoliciesResponse", + "UpdateBackupPolicyRequest", + "DeleteBackupPolicyRequest", + }, +) + + +class BackupPolicy(proto.Message): + r"""Backup Policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. 
The resource name of the backup policy. Format: + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}``. + daily_backup_limit (int): + Number of daily backups to keep. Note that + the minimum daily backup limit is 2. + + This field is a member of `oneof`_ ``_daily_backup_limit``. + weekly_backup_limit (int): + Number of weekly backups to keep. Note that + the sum of daily, weekly and monthly backups + should be greater than 1. + + This field is a member of `oneof`_ ``_weekly_backup_limit``. + monthly_backup_limit (int): + Number of monthly backups to keep. Note that + the sum of daily, weekly and monthly backups + should be greater than 1. + + This field is a member of `oneof`_ ``_monthly_backup_limit``. + description (str): + Description of the backup policy. + + This field is a member of `oneof`_ ``_description``. + enabled (bool): + If enabled, make backups automatically + according to the schedules. This will be applied + to all volumes that have this policy attached + and enforced on volume level. If not specified, + default is true. + + This field is a member of `oneof`_ ``_enabled``. + assigned_volume_count (int): + Output only. The total number of volumes + assigned by this backup policy. + + This field is a member of `oneof`_ ``_assigned_volume_count``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup policy + was created. + labels (MutableMapping[str, str]): + Resource labels to represent user provided + metadata. + state (google.cloud.netapp_v1.types.BackupPolicy.State): + Output only. The backup policy state. + """ + + class State(proto.Enum): + r""" + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + BackupPolicy is being created. + READY (2): + BackupPolicy is available for use. + DELETING (3): + BackupPolicy is being deleted. + ERROR (4): + BackupPolicy is not valid and cannot be used. + UPDATING (5): + BackupPolicy is being updated. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + ERROR = 4 + UPDATING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + daily_backup_limit: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + weekly_backup_limit: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + monthly_backup_limit: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + assigned_volume_count: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + + +class CreateBackupPolicyRequest(proto.Message): + r"""CreateBackupPolicyRequest creates a backupPolicy. + + Attributes: + parent (str): + Required. The location to create the backup policies of, in + the format ``projects/{project_id}/locations/{location}`` + backup_policy (google.cloud.netapp_v1.types.BackupPolicy): + Required. A backupPolicy resource + backup_policy_id (str): + Required. The ID to use for the backup + policy. The ID must be unique within the + specified location. This value must start with a + lowercase letter followed by up to 62 lowercase + letters, numbers, or hyphens, and cannot end + with a hyphen. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_policy: "BackupPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupPolicy", + ) + backup_policy_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetBackupPolicyRequest(proto.Message): + r"""GetBackupPolicyRequest gets the state of a backupPolicy. 
+ + Attributes: + name (str): + Required. The backupPolicy resource name, in the format + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupPoliciesRequest(proto.Message): + r"""ListBackupPoliciesRequest for requesting multiple backup + policies. + + Attributes: + parent (str): + Required. Parent value for + ListBackupPoliciesRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, the server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupPoliciesResponse(proto.Message): + r"""ListBackupPoliciesResponse contains all the backup policies + requested. + + Attributes: + backup_policies (MutableSequence[google.cloud.netapp_v1.types.BackupPolicy]): + The list of backup policies. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + backup_policies: MutableSequence["BackupPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class UpdateBackupPolicyRequest(proto.Message): + r"""UpdateBackupPolicyRequest for updating a backup policy. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup Policy resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + backup_policy (google.cloud.netapp_v1.types.BackupPolicy): + Required. The backup policy being updated + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup_policy: "BackupPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupPolicy", + ) + + +class DeleteBackupPolicyRequest(proto.Message): + r"""DeleteBackupPolicyRequest deletes a backup policy. + + Attributes: + name (str): + Required. 
The backup policy resource name, in the format + ``projects/{project_id}/locations/{location}/backupPolicies/{backup_policy_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py new file mode 100644 index 000000000000..940c0a239a97 --- /dev/null +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/backup_vault.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.netapp.v1", + manifest={ + "BackupVault", + "GetBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "CreateBackupVaultRequest", + "DeleteBackupVaultRequest", + "UpdateBackupVaultRequest", + }, +) + + +class BackupVault(proto.Message): + r"""A NetApp BackupVault. + + Attributes: + name (str): + Identifier. The resource name of the backup vault. Format: + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}``. 
+ state (google.cloud.netapp_v1.types.BackupVault.State): + Output only. The backup vault state. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time of the backup vault. + description (str): + Description of the backup vault. + labels (MutableMapping[str, str]): + Resource labels to represent user provided + metadata. + """ + + class State(proto.Enum): + r"""The Backup Vault States + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + BackupVault is being created. + READY (2): + BackupVault is available for use. + DELETING (3): + BackupVault is being deleted. + ERROR (4): + BackupVault is not valid and cannot be used. + UPDATING (5): + BackupVault is being updated. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + ERROR = 4 + UPDATING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class GetBackupVaultRequest(proto.Message): + r"""GetBackupVaultRequest gets the state of a backupVault. + + Attributes: + name (str): + Required. The backupVault resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupVaultsRequest(proto.Message): + r"""ListBackupVaultsRequest lists backupVaults. + + Attributes: + parent (str): + Required. The location for which to retrieve backupVault + information, in the format + ``projects/{project_id}/locations/{location}``. + page_size (int): + The maximum number of items to return. 
+ page_token (str): + The next_page_token value to use if there are additional + results to retrieve for this list request. + order_by (str): + Sort results. Supported values are "name", + "name desc" or "" (unsorted). + filter (str): + List filter. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupVaultsResponse(proto.Message): + r"""ListBackupVaultsResponse is the result of + ListBackupVaultsRequest. + + Attributes: + backup_vaults (MutableSequence[google.cloud.netapp_v1.types.BackupVault]): + A list of backupVaults in the project for the + specified location. + next_page_token (str): + The token you can use to retrieve the next + page of results. Not returned if there are no + more results in the list. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateBackupVaultRequest(proto.Message): + r"""CreateBackupVaultRequest creates a backup vault. + + Attributes: + parent (str): + Required. The location to create the backup vaults, in the + format ``projects/{project_id}/locations/{location}`` + backup_vault_id (str): + Required. The ID to use for the backupVault. The ID must be + unique within the specified location. The max supported + length is 63 characters. 
This value must start with a + lowercase letter followed by up to 62 lowercase letters, + numbers, or hyphens, and cannot end with a hyphen. Values + that do not match this pattern will trigger an + INVALID_ARGUMENT error. + backup_vault (google.cloud.netapp_v1.types.BackupVault): + Required. A backupVault resource + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_vault_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupVault", + ) + + +class DeleteBackupVaultRequest(proto.Message): + r"""DeleteBackupVaultRequest deletes a backupVault. + + Attributes: + name (str): + Required. The backupVault resource name, in the format + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateBackupVaultRequest(proto.Message): + r"""UpdateBackupVaultRequest updates description and/or labels + for a backupVault. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource to be updated. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then all + fields will be overwritten. + backup_vault (google.cloud.netapp_v1.types.BackupVault): + Required. 
The backupVault being updated + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupVault", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py index daabb71860d2..31fe9d89a67d 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py @@ -33,11 +33,11 @@ class ServiceLevel(proto.Enum): Values: SERVICE_LEVEL_UNSPECIFIED (0): - No description available. + Unspecified service level. PREMIUM (1): - No description available. + Premium service level. EXTREME (2): - No description available. + Extreme service level. STANDARD (3): Standard (Software offering) """ diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py index ba829bd4155f..bdcd36c86db0 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/kms.py @@ -263,7 +263,7 @@ class KmsConfig(proto.Message): Attributes: name (str): - Output only. Name of the KmsConfig. + Identifier. Name of the KmsConfig. crypto_key_name (str): Required. Customer managed crypto key resource full name. 
Format: diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/replication.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/replication.py index 5aec6f7d0dc9..37867e3e3f70 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/replication.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/replication.py @@ -143,7 +143,7 @@ class Replication(proto.Message): Attributes: name (str): - Output only. The resource name of the Replication. Format: + Identifier. The resource name of the Replication. Format: ``projects/{project_id}/locations/{location}/volumes/{volume_id}/replications/{replication_id}``. state (google.cloud.netapp_v1.types.Replication.State): Output only. State of the replication. diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/snapshot.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/snapshot.py index d6a7d62b9654..29b7dcbb5b60 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/snapshot.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/snapshot.py @@ -202,7 +202,7 @@ class Snapshot(proto.Message): Attributes: name (str): - Output only. The resource name of the snapshot. Format: + Identifier. The resource name of the snapshot. Format: ``projects/{project_id}/locations/{location}/volumes/{volume_id}/snapshots/{snapshot_id}``. state (google.cloud.netapp_v1.types.Snapshot.State): Output only. The snapshot state. diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py index 367ebf147dc2..344a202dd337 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py @@ -206,7 +206,7 @@ class StoragePool(proto.Message): Attributes: name (str): - Output only. Name of the storage pool + Identifier. 
Name of the storage pool service_level (google.cloud.netapp_v1.types.ServiceLevel): Required. Service level of the storage pool capacity_gib (int): @@ -248,8 +248,8 @@ class StoragePool(proto.Message): Output only. Specifies the current pool encryption key source. global_access_allowed (bool): - Optional. Allows SO pool to access AD or DNS - server from other regions. + Deprecated. Used to allow SO pool to access + AD or DNS server from other regions. This field is a member of `oneof`_ ``_global_access_allowed``. """ diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index 8c426b2e75f9..fc62c322ee47 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -48,12 +48,14 @@ "MonthlySchedule", "MountOption", "RestoreParameters", + "BackupConfig", }, ) class Protocols(proto.Enum): - r""" + r"""Protocols is an enum of all the supported network protocols + for a volume. Values: PROTOCOLS_UNSPECIFIED (0): @@ -72,7 +74,8 @@ class Protocols(proto.Enum): class AccessType(proto.Enum): - r""" + r"""AccessType is an enum of all the supported access types for a + volume. Values: ACCESS_TYPE_UNSPECIFIED (0): @@ -354,9 +357,11 @@ class RevertVolumeRequest(proto.Message): class Volume(proto.Message): r"""Volume provides a filesystem that you can mount. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): - Output only. Name of the volume + Identifier. Name of the volume state (google.cloud.netapp_v1.types.Volume.State): Output only. State of the volume state_details (str): @@ -433,6 +438,10 @@ class Volume(proto.Message): has_replication (bool): Output only. Indicates whether the volume is part of a replication relationship. 
+ backup_config (google.cloud.netapp_v1.types.BackupConfig): + BackupConfig of the volume. + + This field is a member of `oneof`_ ``_backup_config``. restricted_actions (MutableSequence[google.cloud.netapp_v1.types.RestrictedAction]): Optional. List of actions that are restricted on this volume. @@ -596,6 +605,12 @@ class State(proto.Enum): proto.BOOL, number=29, ) + backup_config: "BackupConfig" = proto.Field( + proto.MESSAGE, + number=30, + optional=True, + message="BackupConfig", + ) restricted_actions: MutableSequence["RestrictedAction"] = proto.RepeatedField( proto.ENUM, number=31, @@ -1033,6 +1048,10 @@ class RestoreParameters(proto.Message): r"""The RestoreParameters if volume is created from a snapshot or backup. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -1043,6 +1062,11 @@ class RestoreParameters(proto.Message): projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot} + This field is a member of `oneof`_ ``source``. + source_backup (str): + Full name of the backup resource. Format: + projects/{project}/locations/{location}/backupVaults/{backup_vault_id}/backups/{backup_id} + This field is a member of `oneof`_ ``source``. """ @@ -1051,6 +1075,47 @@ class RestoreParameters(proto.Message): number=1, oneof="source", ) + source_backup: str = proto.Field( + proto.STRING, + number=2, + oneof="source", + ) + + +class BackupConfig(proto.Message): + r"""BackupConfig contains backup related config on a volume. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_policies (MutableSequence[str]): + Optional. 
When specified, schedule backups + will be created based on the policy + configuration. + backup_vault (str): + Optional. Name of backup vault. Format: + projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id} + scheduled_backup_enabled (bool): + Optional. When set to true, scheduled backup + is enabled on the volume. This field should be + nil when there's no backup policy attached. + + This field is a member of `oneof`_ ``_scheduled_backup_enabled``. + """ + + backup_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + backup_vault: str = proto.Field( + proto.STRING, + number=2, + ) + scheduled_backup_enabled: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_async.py new file mode 100644 index 000000000000..50ba16902d3d --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_create_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_CreateBackup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_policy_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_policy_async.py new file mode 100644 index 000000000000..3a32e7b1ae73 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateBackupPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_create_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupPolicyRequest( + parent="parent_value", + backup_policy_id="backup_policy_id_value", + ) + + # Make the request + operation = client.create_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_CreateBackupPolicy_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_policy_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_policy_sync.py new file mode 100644 index 000000000000..bac988fb9d60 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateBackupPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_create_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupPolicyRequest( + parent="parent_value", + backup_policy_id="backup_policy_id_value", + ) + + # Make the request + operation = client.create_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_CreateBackupPolicy_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_sync.py new file mode 100644 index 000000000000..a5e26b8a6388 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_create_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_CreateBackup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_vault_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_vault_async.py new file mode 100644 index 000000000000..9dc5c104a123 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_vault_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_create_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_CreateBackupVault_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_vault_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_vault_sync.py new file mode 100644 index 000000000000..e7a2b64b3332 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_create_backup_vault_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_CreateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_create_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_CreateBackupVault_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_async.py new file mode 100644 index 000000000000..c0135cbac5de --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_delete_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_DeleteBackup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_policy_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_policy_async.py new file mode 100644 index 000000000000..c90cd411d288 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_policy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteBackupPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_delete_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_DeleteBackupPolicy_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_policy_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_policy_sync.py new file mode 100644 index 000000000000..3ab88c192b5e --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_policy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteBackupPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_delete_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_DeleteBackupPolicy_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_sync.py new file mode 100644 index 000000000000..13efe079cece --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_delete_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_DeleteBackup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_vault_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_vault_async.py new file mode 100644 index 000000000000..d83731d2deef --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_vault_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_delete_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_DeleteBackupVault_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_vault_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_vault_sync.py new file mode 100644 index 000000000000..6e46f74f93d3 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_delete_backup_vault_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_DeleteBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_delete_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_DeleteBackupVault_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_async.py new file mode 100644 index 000000000000..8aa588a6bc8c --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_get_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_GetBackup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_policy_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_policy_async.py new file mode 100644 index 000000000000..4bc002561fbe --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetBackupPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_get_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_policy(request=request) + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_GetBackupPolicy_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_policy_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_policy_sync.py new file mode 100644 index 000000000000..71dad7cb2613 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetBackupPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_get_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_policy(request=request) + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_GetBackupPolicy_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_sync.py new file mode 100644 index 000000000000..bf2f40c534f7 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_get_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_GetBackup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_vault_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_vault_async.py new file mode 100644 index 000000000000..142a21b45cb8 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_vault_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_get_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_GetBackupVault_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_vault_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_vault_sync.py new file mode 100644 index 000000000000..90f48e6cd6d5 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_get_backup_vault_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_GetBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_get_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_GetBackupVault_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_policies_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_policies_async.py new file mode 100644 index 000000000000..ef788522e1ca --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListBackupPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_list_backup_policies(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END netapp_v1_generated_NetApp_ListBackupPolicies_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_policies_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_policies_sync.py new file mode 100644 index 000000000000..c71766cdc155 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListBackupPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_list_backup_policies(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END netapp_v1_generated_NetApp_ListBackupPolicies_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_vaults_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_vaults_async.py new file mode 100644 index 000000000000..3c29275adc62 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_list_backup_vaults(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END netapp_v1_generated_NetApp_ListBackupVaults_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_vaults_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_vaults_sync.py new file mode 100644 index 000000000000..b732b2cf533b --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_list_backup_vaults(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END netapp_v1_generated_NetApp_ListBackupVaults_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backups_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backups_async.py new file mode 100644 index 000000000000..7e96d65485c5 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_list_backups(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END netapp_v1_generated_NetApp_ListBackups_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backups_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backups_sync.py new file mode 100644 index 000000000000..993d184cb21e --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_list_backups(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END netapp_v1_generated_NetApp_ListBackups_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_async.py new file mode 100644 index 000000000000..79550a1e4ee1 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_update_backup(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_UpdateBackup_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_policy_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_policy_async.py new file mode 100644 index 000000000000..ce3156677959 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_policy_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateBackupPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_update_backup_policy(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupPolicyRequest( + ) + + # Make the request + operation = client.update_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_UpdateBackupPolicy_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_policy_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_policy_sync.py new file mode 100644 index 000000000000..4a6238a8c3a9 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateBackupPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_update_backup_policy(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupPolicyRequest( + ) + + # Make the request + operation = client.update_backup_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_UpdateBackupPolicy_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_sync.py new file mode 100644 index 000000000000..5195bfdf40af --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_update_backup(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_UpdateBackup_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_vault_async.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_vault_async.py new file mode 100644 index 000000000000..d3a431cd5118 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_vault_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +async def sample_update_backup_vault(): + # Create a client + client = netapp_v1.NetAppAsyncClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_UpdateBackupVault_async] diff --git a/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_vault_sync.py b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_vault_sync.py new file mode 100644 index 000000000000..3d99a6db48d5 --- /dev/null +++ b/packages/google-cloud-netapp/samples/generated_samples/netapp_v1_generated_net_app_update_backup_vault_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-netapp + + +# [START netapp_v1_generated_NetApp_UpdateBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import netapp_v1 + + +def sample_update_backup_vault(): + # Create a client + client = netapp_v1.NetAppClient() + + # Initialize request argument(s) + request = netapp_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END netapp_v1_generated_NetApp_UpdateBackupVault_sync] diff --git a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json index d27c8280de07..cea344548067 100644 --- a/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json +++ b/packages/google-cloud-netapp/samples/generated_samples/snippet_metadata_google.cloud.netapp.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-netapp", - "version": "0.3.2" + "version": "0.3.3" }, "snippets": [ { @@ -196,30 +196,30 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_backup_policy", "method": { - "fullName": 
"google.cloud.netapp.v1.NetApp.CreateKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.CreateBackupPolicy", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateKmsConfig" + "shortName": "CreateBackupPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.CreateBackupPolicyRequest" }, { "name": "parent", "type": "str" }, { - "name": "kms_config", - "type": "google.cloud.netapp_v1.types.KmsConfig" + "name": "backup_policy", + "type": "google.cloud.netapp_v1.types.BackupPolicy" }, { - "name": "kms_config_id", + "name": "backup_policy_id", "type": "str" }, { @@ -236,21 +236,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_kms_config" + "shortName": "create_backup_policy" }, - "description": "Sample for CreateKmsConfig", - "file": "netapp_v1_generated_net_app_create_kms_config_async.py", + "description": "Sample for CreateBackupPolicy", + "file": "netapp_v1_generated_net_app_create_backup_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateKmsConfig_async", + "regionTag": "netapp_v1_generated_NetApp_CreateBackupPolicy_async", "segments": [ { - "end": 60, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 56, "start": 27, "type": "SHORT" }, @@ -260,22 +260,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_kms_config_async.py" + "title": "netapp_v1_generated_net_app_create_backup_policy_async.py" }, { "canonical": true, @@ -284,30 +284,30 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", 
"shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.create_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppClient.create_backup_policy", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.CreateBackupPolicy", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateKmsConfig" + "shortName": "CreateBackupPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.CreateBackupPolicyRequest" }, { "name": "parent", "type": "str" }, { - "name": "kms_config", - "type": "google.cloud.netapp_v1.types.KmsConfig" + "name": "backup_policy", + "type": "google.cloud.netapp_v1.types.BackupPolicy" }, { - "name": "kms_config_id", + "name": "backup_policy_id", "type": "str" }, { @@ -324,21 +324,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_kms_config" + "shortName": "create_backup_policy" }, - "description": "Sample for CreateKmsConfig", - "file": "netapp_v1_generated_net_app_create_kms_config_sync.py", + "description": "Sample for CreateBackupPolicy", + "file": "netapp_v1_generated_net_app_create_backup_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateKmsConfig_sync", + "regionTag": "netapp_v1_generated_NetApp_CreateBackupPolicy_sync", "segments": [ { - "end": 60, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 56, "start": 27, "type": "SHORT" }, @@ -348,22 +348,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": 
"netapp_v1_generated_net_app_create_kms_config_sync.py" + "title": "netapp_v1_generated_net_app_create_backup_policy_sync.py" }, { "canonical": true, @@ -373,30 +373,30 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_replication", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_backup_vault", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateReplication", + "fullName": "google.cloud.netapp.v1.NetApp.CreateBackupVault", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateReplication" + "shortName": "CreateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateReplicationRequest" + "type": "google.cloud.netapp_v1.types.CreateBackupVaultRequest" }, { "name": "parent", "type": "str" }, { - "name": "replication", - "type": "google.cloud.netapp_v1.types.Replication" + "name": "backup_vault", + "type": "google.cloud.netapp_v1.types.BackupVault" }, { - "name": "replication_id", + "name": "backup_vault_id", "type": "str" }, { @@ -413,21 +413,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_replication" + "shortName": "create_backup_vault" }, - "description": "Sample for CreateReplication", - "file": "netapp_v1_generated_net_app_create_replication_async.py", + "description": "Sample for CreateBackupVault", + "file": "netapp_v1_generated_net_app_create_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateReplication_async", + "regionTag": "netapp_v1_generated_NetApp_CreateBackupVault_async", "segments": [ { - "end": 61, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 56, "start": 27, "type": "SHORT" }, @@ -437,22 +437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 46, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_replication_async.py" + "title": "netapp_v1_generated_net_app_create_backup_vault_async.py" }, { "canonical": true, @@ -461,30 +461,30 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.create_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.create_backup_vault", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateReplication", + "fullName": "google.cloud.netapp.v1.NetApp.CreateBackupVault", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateReplication" + "shortName": "CreateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateReplicationRequest" + "type": "google.cloud.netapp_v1.types.CreateBackupVaultRequest" }, { "name": "parent", "type": "str" }, { - "name": "replication", - "type": "google.cloud.netapp_v1.types.Replication" + "name": "backup_vault", + "type": "google.cloud.netapp_v1.types.BackupVault" }, { - "name": "replication_id", + "name": "backup_vault_id", "type": "str" }, { @@ -501,21 +501,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_replication" + "shortName": "create_backup_vault" }, - "description": "Sample for CreateReplication", - "file": "netapp_v1_generated_net_app_create_replication_sync.py", + "description": "Sample for CreateBackupVault", + "file": "netapp_v1_generated_net_app_create_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateReplication_sync", + "regionTag": "netapp_v1_generated_NetApp_CreateBackupVault_sync", "segments": [ { - "end": 61, + "end": 56, "start": 27, 
"type": "FULL" }, { - "end": 61, + "end": 56, "start": 27, "type": "SHORT" }, @@ -525,22 +525,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_replication_sync.py" + "title": "netapp_v1_generated_net_app_create_backup_vault_sync.py" }, { "canonical": true, @@ -550,30 +550,30 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_backup", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.CreateBackup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateSnapshot" + "shortName": "CreateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateSnapshotRequest" + "type": "google.cloud.netapp_v1.types.CreateBackupRequest" }, { "name": "parent", "type": "str" }, { - "name": "snapshot", - "type": "google.cloud.netapp_v1.types.Snapshot" + "name": "backup", + "type": "google.cloud.netapp_v1.types.Backup" }, { - "name": "snapshot_id", + "name": "backup_id", "type": "str" }, { @@ -590,13 +590,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_snapshot" + "shortName": "create_backup" }, - "description": "Sample for CreateSnapshot", - "file": "netapp_v1_generated_net_app_create_snapshot_async.py", + "description": "Sample for CreateBackup", + "file": "netapp_v1_generated_net_app_create_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"netapp_v1_generated_NetApp_CreateSnapshot_async", + "regionTag": "netapp_v1_generated_NetApp_CreateBackup_async", "segments": [ { "end": 56, @@ -629,7 +629,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_snapshot_async.py" + "title": "netapp_v1_generated_net_app_create_backup_async.py" }, { "canonical": true, @@ -638,30 +638,30 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.create_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppClient.create_backup", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.CreateBackup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateSnapshot" + "shortName": "CreateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateSnapshotRequest" + "type": "google.cloud.netapp_v1.types.CreateBackupRequest" }, { "name": "parent", "type": "str" }, { - "name": "snapshot", - "type": "google.cloud.netapp_v1.types.Snapshot" + "name": "backup", + "type": "google.cloud.netapp_v1.types.Backup" }, { - "name": "snapshot_id", + "name": "backup_id", "type": "str" }, { @@ -678,13 +678,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_snapshot" + "shortName": "create_backup" }, - "description": "Sample for CreateSnapshot", - "file": "netapp_v1_generated_net_app_create_snapshot_sync.py", + "description": "Sample for CreateBackup", + "file": "netapp_v1_generated_net_app_create_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateSnapshot_sync", + "regionTag": "netapp_v1_generated_NetApp_CreateBackup_sync", "segments": [ { "end": 56, @@ -717,7 +717,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_snapshot_sync.py" + "title": 
"netapp_v1_generated_net_app_create_backup_sync.py" }, { "canonical": true, @@ -727,30 +727,30 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_storage_pool", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateStoragePool", + "fullName": "google.cloud.netapp.v1.NetApp.CreateKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateStoragePool" + "shortName": "CreateKmsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateStoragePoolRequest" + "type": "google.cloud.netapp_v1.types.CreateKmsConfigRequest" }, { "name": "parent", "type": "str" }, { - "name": "storage_pool", - "type": "google.cloud.netapp_v1.types.StoragePool" + "name": "kms_config", + "type": "google.cloud.netapp_v1.types.KmsConfig" }, { - "name": "storage_pool_id", + "name": "kms_config_id", "type": "str" }, { @@ -767,21 +767,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_storage_pool" + "shortName": "create_kms_config" }, - "description": "Sample for CreateStoragePool", - "file": "netapp_v1_generated_net_app_create_storage_pool_async.py", + "description": "Sample for CreateKmsConfig", + "file": "netapp_v1_generated_net_app_create_kms_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateStoragePool_async", + "regionTag": "netapp_v1_generated_NetApp_CreateKmsConfig_async", "segments": [ { - "end": 62, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 60, "start": 27, "type": "SHORT" }, @@ -791,22 +791,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 57, + "start": 51, "type": 
"REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_storage_pool_async.py" + "title": "netapp_v1_generated_net_app_create_kms_config_async.py" }, { "canonical": true, @@ -815,30 +815,30 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.create_storage_pool", + "fullName": "google.cloud.netapp_v1.NetAppClient.create_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateStoragePool", + "fullName": "google.cloud.netapp.v1.NetApp.CreateKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateStoragePool" + "shortName": "CreateKmsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateStoragePoolRequest" + "type": "google.cloud.netapp_v1.types.CreateKmsConfigRequest" }, { "name": "parent", "type": "str" }, { - "name": "storage_pool", - "type": "google.cloud.netapp_v1.types.StoragePool" + "name": "kms_config", + "type": "google.cloud.netapp_v1.types.KmsConfig" }, { - "name": "storage_pool_id", + "name": "kms_config_id", "type": "str" }, { @@ -855,21 +855,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_storage_pool" + "shortName": "create_kms_config" }, - "description": "Sample for CreateStoragePool", - "file": "netapp_v1_generated_net_app_create_storage_pool_sync.py", + "description": "Sample for CreateKmsConfig", + "file": "netapp_v1_generated_net_app_create_kms_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateStoragePool_sync", + "regionTag": "netapp_v1_generated_NetApp_CreateKmsConfig_sync", "segments": [ { - "end": 62, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 60, "start": 27, "type": "SHORT" }, @@ -879,22 +879,22 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_storage_pool_sync.py" + "title": "netapp_v1_generated_net_app_create_kms_config_sync.py" }, { "canonical": true, @@ -904,30 +904,30 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_volume", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateVolume", + "fullName": "google.cloud.netapp.v1.NetApp.CreateReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateVolume" + "shortName": "CreateReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateVolumeRequest" + "type": "google.cloud.netapp_v1.types.CreateReplicationRequest" }, { "name": "parent", "type": "str" }, { - "name": "volume", - "type": "google.cloud.netapp_v1.types.Volume" + "name": "replication", + "type": "google.cloud.netapp_v1.types.Replication" }, { - "name": "volume_id", + "name": "replication_id", "type": "str" }, { @@ -944,21 +944,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_volume" + "shortName": "create_replication" }, - "description": "Sample for CreateVolume", - "file": "netapp_v1_generated_net_app_create_volume_async.py", + "description": "Sample for CreateReplication", + "file": "netapp_v1_generated_net_app_create_replication_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_CreateVolume_async", + "regionTag": "netapp_v1_generated_NetApp_CreateReplication_async", 
"segments": [ { - "end": 63, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 61, "start": 27, "type": "SHORT" }, @@ -968,22 +968,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_volume_async.py" + "title": "netapp_v1_generated_net_app_create_replication_async.py" }, { "canonical": true, @@ -992,30 +992,30 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.create_volume", + "fullName": "google.cloud.netapp_v1.NetAppClient.create_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.CreateVolume", + "fullName": "google.cloud.netapp.v1.NetApp.CreateReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "CreateVolume" + "shortName": "CreateReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.CreateVolumeRequest" + "type": "google.cloud.netapp_v1.types.CreateReplicationRequest" }, { "name": "parent", "type": "str" }, { - "name": "volume", - "type": "google.cloud.netapp_v1.types.Volume" + "name": "replication", + "type": "google.cloud.netapp_v1.types.Replication" }, { - "name": "volume_id", + "name": "replication_id", "type": "str" }, { @@ -1032,21 +1032,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_volume" + "shortName": "create_replication" }, - "description": "Sample for CreateVolume", - "file": "netapp_v1_generated_net_app_create_volume_sync.py", + "description": "Sample for CreateReplication", + "file": "netapp_v1_generated_net_app_create_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "netapp_v1_generated_NetApp_CreateVolume_sync", + "regionTag": "netapp_v1_generated_NetApp_CreateReplication_sync", "segments": [ { - "end": 63, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 61, "start": 27, "type": "SHORT" }, @@ -1056,22 +1056,2477 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_snapshot", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateSnapshot", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateSnapshotRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "snapshot", + "type": "google.cloud.netapp_v1.types.Snapshot" + }, + { + "name": "snapshot_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "netapp_v1_generated_net_app_create_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateSnapshot_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.create_snapshot", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateSnapshot", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateSnapshotRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "snapshot", + "type": "google.cloud.netapp_v1.types.Snapshot" + }, + { + "name": "snapshot_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "netapp_v1_generated_net_app_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateSnapshot_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" 
+ } + ], + "title": "netapp_v1_generated_net_app_create_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_storage_pool", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateStoragePool", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateStoragePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateStoragePoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "storage_pool", + "type": "google.cloud.netapp_v1.types.StoragePool" + }, + { + "name": "storage_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_storage_pool" + }, + "description": "Sample for CreateStoragePool", + "file": "netapp_v1_generated_net_app_create_storage_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateStoragePool_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_storage_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": 
"NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.create_storage_pool", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateStoragePool", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateStoragePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateStoragePoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "storage_pool", + "type": "google.cloud.netapp_v1.types.StoragePool" + }, + { + "name": "storage_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_storage_pool" + }, + "description": "Sample for CreateStoragePool", + "file": "netapp_v1_generated_net_app_create_storage_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateStoragePool_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_storage_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.create_volume", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateVolume", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + 
"shortName": "NetApp" + }, + "shortName": "CreateVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateVolumeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "volume", + "type": "google.cloud.netapp_v1.types.Volume" + }, + { + "name": "volume_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_volume" + }, + "description": "Sample for CreateVolume", + "file": "netapp_v1_generated_net_app_create_volume_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateVolume_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_volume_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.create_volume", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.CreateVolume", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "CreateVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.CreateVolumeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "volume", + "type": "google.cloud.netapp_v1.types.Volume" + }, + { + "name": 
"volume_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_volume" + }, + "description": "Sample for CreateVolume", + "file": "netapp_v1_generated_net_app_create_volume_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_CreateVolume_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_create_volume_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_active_directory", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteActiveDirectory", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteActiveDirectory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteActiveDirectoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_active_directory" + }, + "description": "Sample for DeleteActiveDirectory", 
+ "file": "netapp_v1_generated_net_app_delete_active_directory_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteActiveDirectory_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_active_directory_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_active_directory", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteActiveDirectory", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteActiveDirectory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteActiveDirectoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_active_directory" + }, + "description": "Sample for DeleteActiveDirectory", + "file": "netapp_v1_generated_net_app_delete_active_directory_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteActiveDirectory_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_active_directory_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_backup_policy", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteBackupPolicy", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteBackupPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteBackupPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_policy" + }, + "description": "Sample for DeleteBackupPolicy", + "file": "netapp_v1_generated_net_app_delete_backup_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteBackupPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_backup_policy_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_backup_policy", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteBackupPolicy", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteBackupPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteBackupPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_policy" + }, + "description": "Sample for DeleteBackupPolicy", + "file": "netapp_v1_generated_net_app_delete_backup_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteBackupPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_backup_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteBackupVault", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + 
"shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "netapp_v1_generated_net_app_delete_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteBackupVault_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteBackupVault", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "netapp_v1_generated_net_app_delete_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteBackupVault_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteBackup", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "netapp_v1_generated_net_app_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, 
+ { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_backup", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteBackup", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "netapp_v1_generated_net_app_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_kms_config", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteKmsConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_kms_config" + }, + "description": "Sample for DeleteKmsConfig", + "file": "netapp_v1_generated_net_app_delete_kms_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_kms_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_kms_config", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteKmsConfig" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_kms_config" + }, + "description": "Sample for DeleteKmsConfig", + "file": "netapp_v1_generated_net_app_delete_kms_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_kms_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_replication", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "delete_replication" + }, + "description": "Sample for DeleteReplication", + "file": "netapp_v1_generated_net_app_delete_replication_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_replication_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_replication", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_replication" + }, + "description": "Sample for DeleteReplication", + "file": "netapp_v1_generated_net_app_delete_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + 
"end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_snapshot", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteSnapshot", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "netapp_v1_generated_net_app_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteSnapshot_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"netapp_v1_generated_net_app_delete_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_snapshot", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteSnapshot", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "netapp_v1_generated_net_app_delete_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteSnapshot_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_storage_pool", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteStoragePool", + "service": { + "fullName": 
"google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteStoragePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteStoragePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_storage_pool" + }, + "description": "Sample for DeleteStoragePool", + "file": "netapp_v1_generated_net_app_delete_storage_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteStoragePool_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_storage_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_storage_pool", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteStoragePool", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteStoragePool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteStoragePoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_storage_pool" + }, + "description": "Sample for DeleteStoragePool", + "file": "netapp_v1_generated_net_app_delete_storage_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteStoragePool_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_storage_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_volume", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteVolume", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteVolumeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_volume" + }, + "description": "Sample for DeleteVolume", + "file": "netapp_v1_generated_net_app_delete_volume_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteVolume_async", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_delete_volume_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.delete_volume", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.DeleteVolume", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "DeleteVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.DeleteVolumeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_volume" + }, + "description": "Sample for DeleteVolume", + "file": "netapp_v1_generated_net_app_delete_volume_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_DeleteVolume_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"netapp_v1_generated_net_app_delete_volume_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.encrypt_volumes", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.EncryptVolumes", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "EncryptVolumes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.EncryptVolumesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "encrypt_volumes" + }, + "description": "Sample for EncryptVolumes", + "file": "netapp_v1_generated_net_app_encrypt_volumes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_EncryptVolumes_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_encrypt_volumes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.encrypt_volumes", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.EncryptVolumes", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" 
+ }, + "shortName": "EncryptVolumes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.EncryptVolumesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "encrypt_volumes" + }, + "description": "Sample for EncryptVolumes", + "file": "netapp_v1_generated_net_app_encrypt_volumes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_EncryptVolumes_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_encrypt_volumes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_active_directory", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.GetActiveDirectory", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "GetActiveDirectory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.GetActiveDirectoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.netapp_v1.types.ActiveDirectory", + "shortName": "get_active_directory" + }, + "description": "Sample for GetActiveDirectory", + "file": "netapp_v1_generated_net_app_get_active_directory_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_GetActiveDirectory_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_get_active_directory_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.get_active_directory", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.GetActiveDirectory", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "GetActiveDirectory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.GetActiveDirectoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.netapp_v1.types.ActiveDirectory", + "shortName": "get_active_directory" + }, + "description": "Sample for GetActiveDirectory", + "file": "netapp_v1_generated_net_app_get_active_directory_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_GetActiveDirectory_sync", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "netapp_v1_generated_net_app_get_active_directory_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", + "shortName": "NetAppAsyncClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_backup_policy", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.GetBackupPolicy", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "GetBackupPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.GetBackupPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.netapp_v1.types.BackupPolicy", + "shortName": "get_backup_policy" + }, + "description": "Sample for GetBackupPolicy", + "file": "netapp_v1_generated_net_app_get_backup_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_GetBackupPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"netapp_v1_generated_net_app_get_backup_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.netapp_v1.NetAppClient", + "shortName": "NetAppClient" + }, + "fullName": "google.cloud.netapp_v1.NetAppClient.get_backup_policy", + "method": { + "fullName": "google.cloud.netapp.v1.NetApp.GetBackupPolicy", + "service": { + "fullName": "google.cloud.netapp.v1.NetApp", + "shortName": "NetApp" + }, + "shortName": "GetBackupPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.netapp_v1.types.GetBackupPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.netapp_v1.types.BackupPolicy", + "shortName": "get_backup_policy" + }, + "description": "Sample for GetBackupPolicy", + "file": "netapp_v1_generated_net_app_get_backup_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "netapp_v1_generated_NetApp_GetBackupPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_create_volume_sync.py" + "title": "netapp_v1_generated_net_app_get_backup_policy_sync.py" }, { "canonical": true, @@ -1081,19 +3536,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_active_directory", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_backup_vault", 
"method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteActiveDirectory", + "fullName": "google.cloud.netapp.v1.NetApp.GetBackupVault", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteActiveDirectory" + "shortName": "GetBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteActiveDirectoryRequest" + "type": "google.cloud.netapp_v1.types.GetBackupVaultRequest" }, { "name": "name", @@ -1112,22 +3567,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_active_directory" + "resultType": "google.cloud.netapp_v1.types.BackupVault", + "shortName": "get_backup_vault" }, - "description": "Sample for DeleteActiveDirectory", - "file": "netapp_v1_generated_net_app_delete_active_directory_async.py", + "description": "Sample for GetBackupVault", + "file": "netapp_v1_generated_net_app_get_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteActiveDirectory_async", + "regionTag": "netapp_v1_generated_NetApp_GetBackupVault_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1142,17 +3597,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_active_directory_async.py" + "title": "netapp_v1_generated_net_app_get_backup_vault_async.py" }, { "canonical": true, @@ -1161,19 +3616,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_active_directory", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_backup_vault", "method": { - "fullName": 
"google.cloud.netapp.v1.NetApp.DeleteActiveDirectory", + "fullName": "google.cloud.netapp.v1.NetApp.GetBackupVault", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteActiveDirectory" + "shortName": "GetBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteActiveDirectoryRequest" + "type": "google.cloud.netapp_v1.types.GetBackupVaultRequest" }, { "name": "name", @@ -1192,22 +3647,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_active_directory" + "resultType": "google.cloud.netapp_v1.types.BackupVault", + "shortName": "get_backup_vault" }, - "description": "Sample for DeleteActiveDirectory", - "file": "netapp_v1_generated_net_app_delete_active_directory_sync.py", + "description": "Sample for GetBackupVault", + "file": "netapp_v1_generated_net_app_get_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteActiveDirectory_sync", + "regionTag": "netapp_v1_generated_NetApp_GetBackupVault_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1222,17 +3677,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_active_directory_sync.py" + "title": "netapp_v1_generated_net_app_get_backup_vault_sync.py" }, { "canonical": true, @@ -1242,19 +3697,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_backup", "method": { - "fullName": 
"google.cloud.netapp.v1.NetApp.DeleteKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.GetBackup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteKmsConfig" + "shortName": "GetBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.GetBackupRequest" }, { "name": "name", @@ -1273,22 +3728,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_kms_config" + "resultType": "google.cloud.netapp_v1.types.Backup", + "shortName": "get_backup" }, - "description": "Sample for DeleteKmsConfig", - "file": "netapp_v1_generated_net_app_delete_kms_config_async.py", + "description": "Sample for GetBackup", + "file": "netapp_v1_generated_net_app_get_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_async", + "regionTag": "netapp_v1_generated_NetApp_GetBackup_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1303,17 +3758,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_kms_config_async.py" + "title": "netapp_v1_generated_net_app_get_backup_async.py" }, { "canonical": true, @@ -1322,19 +3777,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_backup", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.GetBackup", "service": { "fullName": 
"google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteKmsConfig" + "shortName": "GetBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.GetBackupRequest" }, { "name": "name", @@ -1353,22 +3808,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_kms_config" + "resultType": "google.cloud.netapp_v1.types.Backup", + "shortName": "get_backup" }, - "description": "Sample for DeleteKmsConfig", - "file": "netapp_v1_generated_net_app_delete_kms_config_sync.py", + "description": "Sample for GetBackup", + "file": "netapp_v1_generated_net_app_get_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteKmsConfig_sync", + "regionTag": "netapp_v1_generated_NetApp_GetBackup_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1383,17 +3838,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_kms_config_sync.py" + "title": "netapp_v1_generated_net_app_get_backup_sync.py" }, { "canonical": true, @@ -1403,19 +3858,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_replication", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteReplication" + "shortName": "GetKmsConfig" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteReplicationRequest" + "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" }, { "name": "name", @@ -1434,22 +3889,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_replication" + "resultType": "google.cloud.netapp_v1.types.KmsConfig", + "shortName": "get_kms_config" }, - "description": "Sample for DeleteReplication", - "file": "netapp_v1_generated_net_app_delete_replication_async.py", + "description": "Sample for GetKmsConfig", + "file": "netapp_v1_generated_net_app_get_kms_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_async", + "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1464,17 +3919,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_replication_async.py" + "title": "netapp_v1_generated_net_app_get_kms_config_async.py" }, { "canonical": true, @@ -1483,19 +3938,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_kms_config", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteReplication", + "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteReplication" + "shortName": "GetKmsConfig" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.netapp_v1.types.DeleteReplicationRequest" + "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" }, { "name": "name", @@ -1514,22 +3969,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_replication" + "resultType": "google.cloud.netapp_v1.types.KmsConfig", + "shortName": "get_kms_config" }, - "description": "Sample for DeleteReplication", - "file": "netapp_v1_generated_net_app_delete_replication_sync.py", + "description": "Sample for GetKmsConfig", + "file": "netapp_v1_generated_net_app_get_kms_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteReplication_sync", + "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1544,17 +3999,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_replication_sync.py" + "title": "netapp_v1_generated_net_app_get_kms_config_sync.py" }, { "canonical": true, @@ -1564,19 +4019,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteSnapshot" + "shortName": "GetReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteSnapshotRequest" + "type": 
"google.cloud.netapp_v1.types.GetReplicationRequest" }, { "name": "name", @@ -1595,22 +4050,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_snapshot" + "resultType": "google.cloud.netapp_v1.types.Replication", + "shortName": "get_replication" }, - "description": "Sample for DeleteSnapshot", - "file": "netapp_v1_generated_net_app_delete_snapshot_async.py", + "description": "Sample for GetReplication", + "file": "netapp_v1_generated_net_app_get_replication_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteSnapshot_async", + "regionTag": "netapp_v1_generated_NetApp_GetReplication_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1625,17 +4080,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_snapshot_async.py" + "title": "netapp_v1_generated_net_app_get_replication_async.py" }, { "canonical": true, @@ -1644,19 +4099,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteSnapshot" + "shortName": "GetReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteSnapshotRequest" + "type": "google.cloud.netapp_v1.types.GetReplicationRequest" }, { "name": "name", @@ -1675,22 +4130,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_snapshot" + "resultType": "google.cloud.netapp_v1.types.Replication", + "shortName": "get_replication" }, - "description": "Sample for DeleteSnapshot", - "file": "netapp_v1_generated_net_app_delete_snapshot_sync.py", + "description": "Sample for GetReplication", + "file": "netapp_v1_generated_net_app_get_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteSnapshot_sync", + "regionTag": "netapp_v1_generated_NetApp_GetReplication_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1705,17 +4160,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_snapshot_sync.py" + "title": "netapp_v1_generated_net_app_get_replication_sync.py" }, { "canonical": true, @@ -1725,19 +4180,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_storage_pool", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_snapshot", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteStoragePool", + "fullName": "google.cloud.netapp.v1.NetApp.GetSnapshot", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteStoragePool" + "shortName": "GetSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteStoragePoolRequest" + "type": "google.cloud.netapp_v1.types.GetSnapshotRequest" }, { "name": "name", @@ -1756,22 +4211,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - 
"shortName": "delete_storage_pool" + "resultType": "google.cloud.netapp_v1.types.Snapshot", + "shortName": "get_snapshot" }, - "description": "Sample for DeleteStoragePool", - "file": "netapp_v1_generated_net_app_delete_storage_pool_async.py", + "description": "Sample for GetSnapshot", + "file": "netapp_v1_generated_net_app_get_snapshot_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteStoragePool_async", + "regionTag": "netapp_v1_generated_NetApp_GetSnapshot_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1786,17 +4241,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_storage_pool_async.py" + "title": "netapp_v1_generated_net_app_get_snapshot_async.py" }, { "canonical": true, @@ -1805,19 +4260,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_storage_pool", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_snapshot", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteStoragePool", + "fullName": "google.cloud.netapp.v1.NetApp.GetSnapshot", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteStoragePool" + "shortName": "GetSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteStoragePoolRequest" + "type": "google.cloud.netapp_v1.types.GetSnapshotRequest" }, { "name": "name", @@ -1836,22 +4291,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_storage_pool" + "resultType": "google.cloud.netapp_v1.types.Snapshot", + "shortName": 
"get_snapshot" }, - "description": "Sample for DeleteStoragePool", - "file": "netapp_v1_generated_net_app_delete_storage_pool_sync.py", + "description": "Sample for GetSnapshot", + "file": "netapp_v1_generated_net_app_get_snapshot_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteStoragePool_sync", + "regionTag": "netapp_v1_generated_NetApp_GetSnapshot_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1866,17 +4321,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_storage_pool_sync.py" + "title": "netapp_v1_generated_net_app_get_snapshot_sync.py" }, { "canonical": true, @@ -1886,19 +4341,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.delete_volume", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_storage_pool", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteVolume", + "fullName": "google.cloud.netapp.v1.NetApp.GetStoragePool", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteVolume" + "shortName": "GetStoragePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteVolumeRequest" + "type": "google.cloud.netapp_v1.types.GetStoragePoolRequest" }, { "name": "name", @@ -1917,22 +4372,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_volume" + "resultType": "google.cloud.netapp_v1.types.StoragePool", + "shortName": "get_storage_pool" }, - "description": "Sample for DeleteVolume", - "file": 
"netapp_v1_generated_net_app_delete_volume_async.py", + "description": "Sample for GetStoragePool", + "file": "netapp_v1_generated_net_app_get_storage_pool_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteVolume_async", + "regionTag": "netapp_v1_generated_NetApp_GetStoragePool_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1947,17 +4402,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_volume_async.py" + "title": "netapp_v1_generated_net_app_get_storage_pool_async.py" }, { "canonical": true, @@ -1966,19 +4421,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.delete_volume", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_storage_pool", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.DeleteVolume", + "fullName": "google.cloud.netapp.v1.NetApp.GetStoragePool", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "DeleteVolume" + "shortName": "GetStoragePool" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.DeleteVolumeRequest" + "type": "google.cloud.netapp_v1.types.GetStoragePoolRequest" }, { "name": "name", @@ -1997,22 +4452,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_volume" + "resultType": "google.cloud.netapp_v1.types.StoragePool", + "shortName": "get_storage_pool" }, - "description": "Sample for DeleteVolume", - "file": "netapp_v1_generated_net_app_delete_volume_sync.py", + "description": "Sample for GetStoragePool", + "file": 
"netapp_v1_generated_net_app_get_storage_pool_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_DeleteVolume_sync", + "regionTag": "netapp_v1_generated_NetApp_GetStoragePool_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2027,17 +4482,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_delete_volume_sync.py" + "title": "netapp_v1_generated_net_app_get_storage_pool_sync.py" }, { "canonical": true, @@ -2047,19 +4502,23 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.encrypt_volumes", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_volume", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.EncryptVolumes", + "fullName": "google.cloud.netapp.v1.NetApp.GetVolume", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "EncryptVolumes" + "shortName": "GetVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.EncryptVolumesRequest" + "type": "google.cloud.netapp_v1.types.GetVolumeRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2074,22 +4533,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "encrypt_volumes" + "resultType": "google.cloud.netapp_v1.types.Volume", + "shortName": "get_volume" }, - "description": "Sample for EncryptVolumes", - "file": "netapp_v1_generated_net_app_encrypt_volumes_async.py", + "description": "Sample for GetVolume", + "file": "netapp_v1_generated_net_app_get_volume_async.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_EncryptVolumes_async", + "regionTag": "netapp_v1_generated_NetApp_GetVolume_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2104,17 +4563,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_encrypt_volumes_async.py" + "title": "netapp_v1_generated_net_app_get_volume_async.py" }, { "canonical": true, @@ -2123,19 +4582,23 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.encrypt_volumes", + "fullName": "google.cloud.netapp_v1.NetAppClient.get_volume", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.EncryptVolumes", + "fullName": "google.cloud.netapp.v1.NetApp.GetVolume", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "EncryptVolumes" + "shortName": "GetVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.EncryptVolumesRequest" + "type": "google.cloud.netapp_v1.types.GetVolumeRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2150,22 +4613,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "encrypt_volumes" + "resultType": "google.cloud.netapp_v1.types.Volume", + "shortName": "get_volume" }, - "description": "Sample for EncryptVolumes", - "file": "netapp_v1_generated_net_app_encrypt_volumes_sync.py", + "description": "Sample for GetVolume", + "file": "netapp_v1_generated_net_app_get_volume_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_EncryptVolumes_sync", + "regionTag": 
"netapp_v1_generated_NetApp_GetVolume_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2180,17 +4643,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_encrypt_volumes_sync.py" + "title": "netapp_v1_generated_net_app_get_volume_sync.py" }, { "canonical": true, @@ -2200,22 +4663,22 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_active_directory", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_active_directories", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetActiveDirectory", + "fullName": "google.cloud.netapp.v1.NetApp.ListActiveDirectories", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetActiveDirectory" + "shortName": "ListActiveDirectories" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetActiveDirectoryRequest" + "type": "google.cloud.netapp_v1.types.ListActiveDirectoriesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2231,22 +4694,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.ActiveDirectory", - "shortName": "get_active_directory" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListActiveDirectoriesAsyncPager", + "shortName": "list_active_directories" }, - "description": "Sample for GetActiveDirectory", - "file": "netapp_v1_generated_net_app_get_active_directory_async.py", + "description": "Sample for ListActiveDirectories", + "file": "netapp_v1_generated_net_app_list_active_directories_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"netapp_v1_generated_NetApp_GetActiveDirectory_async", + "regionTag": "netapp_v1_generated_NetApp_ListActiveDirectories_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2266,12 +4729,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_active_directory_async.py" + "title": "netapp_v1_generated_net_app_list_active_directories_async.py" }, { "canonical": true, @@ -2280,22 +4743,22 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_active_directory", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_active_directories", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetActiveDirectory", + "fullName": "google.cloud.netapp.v1.NetApp.ListActiveDirectories", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetActiveDirectory" + "shortName": "ListActiveDirectories" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetActiveDirectoryRequest" + "type": "google.cloud.netapp_v1.types.ListActiveDirectoriesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2311,22 +4774,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.ActiveDirectory", - "shortName": "get_active_directory" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListActiveDirectoriesPager", + "shortName": "list_active_directories" }, - "description": "Sample for GetActiveDirectory", - "file": "netapp_v1_generated_net_app_get_active_directory_sync.py", + "description": "Sample for ListActiveDirectories", + "file": "netapp_v1_generated_net_app_list_active_directories_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"netapp_v1_generated_NetApp_GetActiveDirectory_sync", + "regionTag": "netapp_v1_generated_NetApp_ListActiveDirectories_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2346,12 +4809,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_active_directory_sync.py" + "title": "netapp_v1_generated_net_app_list_active_directories_sync.py" }, { "canonical": true, @@ -2361,22 +4824,22 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_backup_policies", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.ListBackupPolicies", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetKmsConfig" + "shortName": "ListBackupPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.ListBackupPoliciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2392,22 +4855,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.KmsConfig", - "shortName": "get_kms_config" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListBackupPoliciesAsyncPager", + "shortName": "list_backup_policies" }, - "description": "Sample for GetKmsConfig", - "file": "netapp_v1_generated_net_app_get_kms_config_async.py", + "description": "Sample for ListBackupPolicies", + "file": "netapp_v1_generated_net_app_list_backup_policies_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_async", + "regionTag": 
"netapp_v1_generated_NetApp_ListBackupPolicies_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2427,12 +4890,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_kms_config_async.py" + "title": "netapp_v1_generated_net_app_list_backup_policies_async.py" }, { "canonical": true, @@ -2441,22 +4904,22 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_kms_config", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_backup_policies", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetKmsConfig", + "fullName": "google.cloud.netapp.v1.NetApp.ListBackupPolicies", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetKmsConfig" + "shortName": "ListBackupPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetKmsConfigRequest" + "type": "google.cloud.netapp_v1.types.ListBackupPoliciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2472,22 +4935,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.KmsConfig", - "shortName": "get_kms_config" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListBackupPoliciesPager", + "shortName": "list_backup_policies" }, - "description": "Sample for GetKmsConfig", - "file": "netapp_v1_generated_net_app_get_kms_config_sync.py", + "description": "Sample for ListBackupPolicies", + "file": "netapp_v1_generated_net_app_list_backup_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetKmsConfig_sync", + "regionTag": "netapp_v1_generated_NetApp_ListBackupPolicies_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": 
"FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2507,12 +4970,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_kms_config_sync.py" + "title": "netapp_v1_generated_net_app_list_backup_policies_sync.py" }, { "canonical": true, @@ -2522,22 +4985,22 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_replication", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_backup_vaults", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", + "fullName": "google.cloud.netapp.v1.NetApp.ListBackupVaults", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetReplication" + "shortName": "ListBackupVaults" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetReplicationRequest" + "type": "google.cloud.netapp_v1.types.ListBackupVaultsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2553,22 +5016,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.Replication", - "shortName": "get_replication" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListBackupVaultsAsyncPager", + "shortName": "list_backup_vaults" }, - "description": "Sample for GetReplication", - "file": "netapp_v1_generated_net_app_get_replication_async.py", + "description": "Sample for ListBackupVaults", + "file": "netapp_v1_generated_net_app_list_backup_vaults_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetReplication_async", + "regionTag": "netapp_v1_generated_NetApp_ListBackupVaults_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2588,12 +5051,12 @@ 
"type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_replication_async.py" + "title": "netapp_v1_generated_net_app_list_backup_vaults_async.py" }, { "canonical": true, @@ -2602,22 +5065,22 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_backup_vaults", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetReplication", + "fullName": "google.cloud.netapp.v1.NetApp.ListBackupVaults", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetReplication" + "shortName": "ListBackupVaults" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetReplicationRequest" + "type": "google.cloud.netapp_v1.types.ListBackupVaultsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2633,22 +5096,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.Replication", - "shortName": "get_replication" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListBackupVaultsPager", + "shortName": "list_backup_vaults" }, - "description": "Sample for GetReplication", - "file": "netapp_v1_generated_net_app_get_replication_sync.py", + "description": "Sample for ListBackupVaults", + "file": "netapp_v1_generated_net_app_list_backup_vaults_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetReplication_sync", + "regionTag": "netapp_v1_generated_NetApp_ListBackupVaults_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2668,12 +5131,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"netapp_v1_generated_net_app_get_replication_sync.py" + "title": "netapp_v1_generated_net_app_list_backup_vaults_sync.py" }, { "canonical": true, @@ -2683,22 +5146,22 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_backups", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.ListBackups", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetSnapshot" + "shortName": "ListBackups" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetSnapshotRequest" + "type": "google.cloud.netapp_v1.types.ListBackupsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2714,22 +5177,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.Snapshot", - "shortName": "get_snapshot" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" }, - "description": "Sample for GetSnapshot", - "file": "netapp_v1_generated_net_app_get_snapshot_async.py", + "description": "Sample for ListBackups", + "file": "netapp_v1_generated_net_app_list_backups_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetSnapshot_async", + "regionTag": "netapp_v1_generated_NetApp_ListBackups_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2749,12 +5212,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_snapshot_async.py" + "title": "netapp_v1_generated_net_app_list_backups_async.py" }, { "canonical": true, @@ -2763,22 +5226,22 @@ 
"fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_snapshot", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_backups", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetSnapshot", + "fullName": "google.cloud.netapp.v1.NetApp.ListBackups", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetSnapshot" + "shortName": "ListBackups" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetSnapshotRequest" + "type": "google.cloud.netapp_v1.types.ListBackupsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2794,22 +5257,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.Snapshot", - "shortName": "get_snapshot" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListBackupsPager", + "shortName": "list_backups" }, - "description": "Sample for GetSnapshot", - "file": "netapp_v1_generated_net_app_get_snapshot_sync.py", + "description": "Sample for ListBackups", + "file": "netapp_v1_generated_net_app_list_backups_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetSnapshot_sync", + "regionTag": "netapp_v1_generated_NetApp_ListBackups_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2829,12 +5292,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_snapshot_sync.py" + "title": "netapp_v1_generated_net_app_list_backups_sync.py" }, { "canonical": true, @@ -2844,22 +5307,22 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_storage_pool", + "fullName": 
"google.cloud.netapp_v1.NetAppAsyncClient.list_kms_configs", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetStoragePool", + "fullName": "google.cloud.netapp.v1.NetApp.ListKmsConfigs", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetStoragePool" + "shortName": "ListKmsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetStoragePoolRequest" + "type": "google.cloud.netapp_v1.types.ListKmsConfigsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2875,22 +5338,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.StoragePool", - "shortName": "get_storage_pool" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListKmsConfigsAsyncPager", + "shortName": "list_kms_configs" }, - "description": "Sample for GetStoragePool", - "file": "netapp_v1_generated_net_app_get_storage_pool_async.py", + "description": "Sample for ListKmsConfigs", + "file": "netapp_v1_generated_net_app_list_kms_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetStoragePool_async", + "regionTag": "netapp_v1_generated_NetApp_ListKmsConfigs_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2910,12 +5373,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_storage_pool_async.py" + "title": "netapp_v1_generated_net_app_list_kms_configs_async.py" }, { "canonical": true, @@ -2924,22 +5387,22 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_storage_pool", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_kms_configs", "method": { - "fullName": 
"google.cloud.netapp.v1.NetApp.GetStoragePool", + "fullName": "google.cloud.netapp.v1.NetApp.ListKmsConfigs", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetStoragePool" + "shortName": "ListKmsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetStoragePoolRequest" + "type": "google.cloud.netapp_v1.types.ListKmsConfigsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2955,22 +5418,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.StoragePool", - "shortName": "get_storage_pool" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListKmsConfigsPager", + "shortName": "list_kms_configs" }, - "description": "Sample for GetStoragePool", - "file": "netapp_v1_generated_net_app_get_storage_pool_sync.py", + "description": "Sample for ListKmsConfigs", + "file": "netapp_v1_generated_net_app_list_kms_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetStoragePool_sync", + "regionTag": "netapp_v1_generated_NetApp_ListKmsConfigs_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2990,12 +5453,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_storage_pool_sync.py" + "title": "netapp_v1_generated_net_app_list_kms_configs_sync.py" }, { "canonical": true, @@ -3005,22 +5468,22 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.get_volume", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_replications", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetVolume", + "fullName": "google.cloud.netapp.v1.NetApp.ListReplications", "service": { 
"fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetVolume" + "shortName": "ListReplications" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.GetVolumeRequest" + "type": "google.cloud.netapp_v1.types.ListReplicationsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3036,22 +5499,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.Volume", - "shortName": "get_volume" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListReplicationsAsyncPager", + "shortName": "list_replications" }, - "description": "Sample for GetVolume", - "file": "netapp_v1_generated_net_app_get_volume_async.py", + "description": "Sample for ListReplications", + "file": "netapp_v1_generated_net_app_list_replications_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetVolume_async", + "regionTag": "netapp_v1_generated_NetApp_ListReplications_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3071,12 +5534,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_volume_async.py" + "title": "netapp_v1_generated_net_app_list_replications_async.py" }, { "canonical": true, @@ -3085,22 +5548,22 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.get_volume", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_replications", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.GetVolume", + "fullName": "google.cloud.netapp.v1.NetApp.ListReplications", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "GetVolume" + "shortName": "ListReplications" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.netapp_v1.types.GetVolumeRequest" + "type": "google.cloud.netapp_v1.types.ListReplicationsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3116,22 +5579,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.types.Volume", - "shortName": "get_volume" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListReplicationsPager", + "shortName": "list_replications" }, - "description": "Sample for GetVolume", - "file": "netapp_v1_generated_net_app_get_volume_sync.py", + "description": "Sample for ListReplications", + "file": "netapp_v1_generated_net_app_list_replications_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_GetVolume_sync", + "regionTag": "netapp_v1_generated_NetApp_ListReplications_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3151,12 +5614,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_get_volume_sync.py" + "title": "netapp_v1_generated_net_app_list_replications_sync.py" }, { "canonical": true, @@ -3166,19 +5629,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_active_directories", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_snapshots", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListActiveDirectories", + "fullName": "google.cloud.netapp.v1.NetApp.ListSnapshots", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListActiveDirectories" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListActiveDirectoriesRequest" + "type": 
"google.cloud.netapp_v1.types.ListSnapshotsRequest" }, { "name": "parent", @@ -3197,14 +5660,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListActiveDirectoriesAsyncPager", - "shortName": "list_active_directories" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListSnapshotsAsyncPager", + "shortName": "list_snapshots" }, - "description": "Sample for ListActiveDirectories", - "file": "netapp_v1_generated_net_app_list_active_directories_async.py", + "description": "Sample for ListSnapshots", + "file": "netapp_v1_generated_net_app_list_snapshots_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListActiveDirectories_async", + "regionTag": "netapp_v1_generated_NetApp_ListSnapshots_async", "segments": [ { "end": 52, @@ -3237,7 +5700,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_active_directories_async.py" + "title": "netapp_v1_generated_net_app_list_snapshots_async.py" }, { "canonical": true, @@ -3246,19 +5709,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.list_active_directories", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_snapshots", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListActiveDirectories", + "fullName": "google.cloud.netapp.v1.NetApp.ListSnapshots", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListActiveDirectories" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListActiveDirectoriesRequest" + "type": "google.cloud.netapp_v1.types.ListSnapshotsRequest" }, { "name": "parent", @@ -3277,14 +5740,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListActiveDirectoriesPager", - "shortName": 
"list_active_directories" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListSnapshotsPager", + "shortName": "list_snapshots" }, - "description": "Sample for ListActiveDirectories", - "file": "netapp_v1_generated_net_app_list_active_directories_sync.py", + "description": "Sample for ListSnapshots", + "file": "netapp_v1_generated_net_app_list_snapshots_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListActiveDirectories_sync", + "regionTag": "netapp_v1_generated_NetApp_ListSnapshots_sync", "segments": [ { "end": 52, @@ -3317,7 +5780,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_active_directories_sync.py" + "title": "netapp_v1_generated_net_app_list_snapshots_sync.py" }, { "canonical": true, @@ -3327,19 +5790,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_kms_configs", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_storage_pools", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListKmsConfigs", + "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListKmsConfigs" + "shortName": "ListStoragePools" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListKmsConfigsRequest" + "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" }, { "name": "parent", @@ -3358,14 +5821,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListKmsConfigsAsyncPager", - "shortName": "list_kms_configs" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsAsyncPager", + "shortName": "list_storage_pools" }, - "description": "Sample for ListKmsConfigs", - "file": "netapp_v1_generated_net_app_list_kms_configs_async.py", + 
"description": "Sample for ListStoragePools", + "file": "netapp_v1_generated_net_app_list_storage_pools_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListKmsConfigs_async", + "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_async", "segments": [ { "end": 52, @@ -3398,7 +5861,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_kms_configs_async.py" + "title": "netapp_v1_generated_net_app_list_storage_pools_async.py" }, { "canonical": true, @@ -3407,19 +5870,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.list_kms_configs", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_storage_pools", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListKmsConfigs", + "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListKmsConfigs" + "shortName": "ListStoragePools" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListKmsConfigsRequest" + "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" }, { "name": "parent", @@ -3438,14 +5901,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListKmsConfigsPager", - "shortName": "list_kms_configs" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsPager", + "shortName": "list_storage_pools" }, - "description": "Sample for ListKmsConfigs", - "file": "netapp_v1_generated_net_app_list_kms_configs_sync.py", + "description": "Sample for ListStoragePools", + "file": "netapp_v1_generated_net_app_list_storage_pools_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListKmsConfigs_sync", + "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_sync", "segments": 
[ { "end": 52, @@ -3478,7 +5941,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_kms_configs_sync.py" + "title": "netapp_v1_generated_net_app_list_storage_pools_sync.py" }, { "canonical": true, @@ -3488,19 +5951,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_replications", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_volumes", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListReplications", + "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListReplications" + "shortName": "ListVolumes" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListReplicationsRequest" + "type": "google.cloud.netapp_v1.types.ListVolumesRequest" }, { "name": "parent", @@ -3519,14 +5982,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListReplicationsAsyncPager", - "shortName": "list_replications" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesAsyncPager", + "shortName": "list_volumes" }, - "description": "Sample for ListReplications", - "file": "netapp_v1_generated_net_app_list_replications_async.py", + "description": "Sample for ListVolumes", + "file": "netapp_v1_generated_net_app_list_volumes_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListReplications_async", + "regionTag": "netapp_v1_generated_NetApp_ListVolumes_async", "segments": [ { "end": 52, @@ -3559,7 +6022,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_replications_async.py" + "title": "netapp_v1_generated_net_app_list_volumes_async.py" }, { "canonical": true, @@ -3568,19 +6031,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", 
"shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.list_replications", + "fullName": "google.cloud.netapp_v1.NetAppClient.list_volumes", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListReplications", + "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListReplications" + "shortName": "ListVolumes" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListReplicationsRequest" + "type": "google.cloud.netapp_v1.types.ListVolumesRequest" }, { "name": "parent", @@ -3599,14 +6062,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListReplicationsPager", - "shortName": "list_replications" + "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesPager", + "shortName": "list_volumes" }, - "description": "Sample for ListReplications", - "file": "netapp_v1_generated_net_app_list_replications_sync.py", + "description": "Sample for ListVolumes", + "file": "netapp_v1_generated_net_app_list_volumes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListReplications_sync", + "regionTag": "netapp_v1_generated_NetApp_ListVolumes_sync", "segments": [ { "end": 52, @@ -3639,7 +6102,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_replications_sync.py" + "title": "netapp_v1_generated_net_app_list_volumes_sync.py" }, { "canonical": true, @@ -3649,23 +6112,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_snapshots", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.resume_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListSnapshots", + "fullName": "google.cloud.netapp.v1.NetApp.ResumeReplication", "service": { 
"fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListSnapshots" + "shortName": "ResumeReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListSnapshotsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.ResumeReplicationRequest" }, { "name": "retry", @@ -3680,22 +6139,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListSnapshotsAsyncPager", - "shortName": "list_snapshots" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "resume_replication" }, - "description": "Sample for ListSnapshots", - "file": "netapp_v1_generated_net_app_list_snapshots_async.py", + "description": "Sample for ResumeReplication", + "file": "netapp_v1_generated_net_app_resume_replication_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListSnapshots_async", + "regionTag": "netapp_v1_generated_NetApp_ResumeReplication_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3710,17 +6169,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_snapshots_async.py" + "title": "netapp_v1_generated_net_app_resume_replication_async.py" }, { "canonical": true, @@ -3729,23 +6188,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.list_snapshots", + "fullName": "google.cloud.netapp_v1.NetAppClient.resume_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListSnapshots", + "fullName": "google.cloud.netapp.v1.NetApp.ResumeReplication", "service": { "fullName": 
"google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListSnapshots" + "shortName": "ResumeReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListSnapshotsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.ResumeReplicationRequest" }, { "name": "retry", @@ -3760,22 +6215,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListSnapshotsPager", - "shortName": "list_snapshots" + "resultType": "google.api_core.operation.Operation", + "shortName": "resume_replication" }, - "description": "Sample for ListSnapshots", - "file": "netapp_v1_generated_net_app_list_snapshots_sync.py", + "description": "Sample for ResumeReplication", + "file": "netapp_v1_generated_net_app_resume_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListSnapshots_sync", + "regionTag": "netapp_v1_generated_NetApp_ResumeReplication_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3790,17 +6245,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_snapshots_sync.py" + "title": "netapp_v1_generated_net_app_resume_replication_sync.py" }, { "canonical": true, @@ -3810,23 +6265,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_storage_pools", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.reverse_replication_direction", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListStoragePools", + "fullName": "google.cloud.netapp.v1.NetApp.ReverseReplicationDirection", "service": { 
"fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListStoragePools" + "shortName": "ReverseReplicationDirection" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.ReverseReplicationDirectionRequest" }, { "name": "retry", @@ -3841,22 +6292,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsAsyncPager", - "shortName": "list_storage_pools" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reverse_replication_direction" }, - "description": "Sample for ListStoragePools", - "file": "netapp_v1_generated_net_app_list_storage_pools_async.py", + "description": "Sample for ReverseReplicationDirection", + "file": "netapp_v1_generated_net_app_reverse_replication_direction_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_async", + "regionTag": "netapp_v1_generated_NetApp_ReverseReplicationDirection_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3871,17 +6322,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_storage_pools_async.py" + "title": "netapp_v1_generated_net_app_reverse_replication_direction_async.py" }, { "canonical": true, @@ -3890,23 +6341,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.list_storage_pools", + "fullName": "google.cloud.netapp_v1.NetAppClient.reverse_replication_direction", "method": { - "fullName": 
"google.cloud.netapp.v1.NetApp.ListStoragePools", + "fullName": "google.cloud.netapp.v1.NetApp.ReverseReplicationDirection", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListStoragePools" + "shortName": "ReverseReplicationDirection" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListStoragePoolsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.ReverseReplicationDirectionRequest" }, { "name": "retry", @@ -3921,22 +6368,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListStoragePoolsPager", - "shortName": "list_storage_pools" + "resultType": "google.api_core.operation.Operation", + "shortName": "reverse_replication_direction" }, - "description": "Sample for ListStoragePools", - "file": "netapp_v1_generated_net_app_list_storage_pools_sync.py", + "description": "Sample for ReverseReplicationDirection", + "file": "netapp_v1_generated_net_app_reverse_replication_direction_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListStoragePools_sync", + "regionTag": "netapp_v1_generated_NetApp_ReverseReplicationDirection_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3951,17 +6398,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_storage_pools_sync.py" + "title": "netapp_v1_generated_net_app_reverse_replication_direction_sync.py" }, { "canonical": true, @@ -3971,23 +6418,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.list_volumes", + 
"fullName": "google.cloud.netapp_v1.NetAppAsyncClient.revert_volume", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", + "fullName": "google.cloud.netapp.v1.NetApp.RevertVolume", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListVolumes" + "shortName": "RevertVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListVolumesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.RevertVolumeRequest" }, { "name": "retry", @@ -4002,22 +6445,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesAsyncPager", - "shortName": "list_volumes" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "revert_volume" }, - "description": "Sample for ListVolumes", - "file": "netapp_v1_generated_net_app_list_volumes_async.py", + "description": "Sample for RevertVolume", + "file": "netapp_v1_generated_net_app_revert_volume_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListVolumes_async", + "regionTag": "netapp_v1_generated_NetApp_RevertVolume_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -4027,22 +6470,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_volumes_async.py" + "title": "netapp_v1_generated_net_app_revert_volume_async.py" }, { "canonical": true, @@ -4051,23 +6494,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": 
"google.cloud.netapp_v1.NetAppClient.list_volumes", + "fullName": "google.cloud.netapp_v1.NetAppClient.revert_volume", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ListVolumes", + "fullName": "google.cloud.netapp.v1.NetApp.RevertVolume", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ListVolumes" + "shortName": "RevertVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ListVolumesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.netapp_v1.types.RevertVolumeRequest" }, { "name": "retry", @@ -4082,22 +6521,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.netapp_v1.services.net_app.pagers.ListVolumesPager", - "shortName": "list_volumes" + "resultType": "google.api_core.operation.Operation", + "shortName": "revert_volume" }, - "description": "Sample for ListVolumes", - "file": "netapp_v1_generated_net_app_list_volumes_sync.py", + "description": "Sample for RevertVolume", + "file": "netapp_v1_generated_net_app_revert_volume_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ListVolumes_sync", + "regionTag": "netapp_v1_generated_NetApp_RevertVolume_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -4107,22 +6546,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_list_volumes_sync.py" + "title": "netapp_v1_generated_net_app_revert_volume_sync.py" }, { "canonical": true, @@ -4132,19 +6571,19 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - 
"fullName": "google.cloud.netapp_v1.NetAppAsyncClient.resume_replication", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.stop_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ResumeReplication", + "fullName": "google.cloud.netapp.v1.NetApp.StopReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ResumeReplication" + "shortName": "StopReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ResumeReplicationRequest" + "type": "google.cloud.netapp_v1.types.StopReplicationRequest" }, { "name": "retry", @@ -4160,13 +6599,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resume_replication" + "shortName": "stop_replication" }, - "description": "Sample for ResumeReplication", - "file": "netapp_v1_generated_net_app_resume_replication_async.py", + "description": "Sample for StopReplication", + "file": "netapp_v1_generated_net_app_stop_replication_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ResumeReplication_async", + "regionTag": "netapp_v1_generated_NetApp_StopReplication_async", "segments": [ { "end": 55, @@ -4199,7 +6638,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_resume_replication_async.py" + "title": "netapp_v1_generated_net_app_stop_replication_async.py" }, { "canonical": true, @@ -4208,19 +6647,19 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.resume_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.stop_replication", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ResumeReplication", + "fullName": "google.cloud.netapp.v1.NetApp.StopReplication", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ResumeReplication" + "shortName": "StopReplication" 
}, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ResumeReplicationRequest" + "type": "google.cloud.netapp_v1.types.StopReplicationRequest" }, { "name": "retry", @@ -4236,13 +6675,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "resume_replication" + "shortName": "stop_replication" }, - "description": "Sample for ResumeReplication", - "file": "netapp_v1_generated_net_app_resume_replication_sync.py", + "description": "Sample for StopReplication", + "file": "netapp_v1_generated_net_app_stop_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ResumeReplication_sync", + "regionTag": "netapp_v1_generated_NetApp_StopReplication_sync", "segments": [ { "end": 55, @@ -4275,7 +6714,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_resume_replication_sync.py" + "title": "netapp_v1_generated_net_app_stop_replication_sync.py" }, { "canonical": true, @@ -4285,19 +6724,27 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.reverse_replication_direction", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.update_active_directory", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ReverseReplicationDirection", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateActiveDirectory", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ReverseReplicationDirection" + "shortName": "UpdateActiveDirectory" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ReverseReplicationDirectionRequest" + "type": "google.cloud.netapp_v1.types.UpdateActiveDirectoryRequest" + }, + { + "name": "active_directory", + "type": "google.cloud.netapp_v1.types.ActiveDirectory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": 
"retry", @@ -4313,21 +6760,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reverse_replication_direction" + "shortName": "update_active_directory" }, - "description": "Sample for ReverseReplicationDirection", - "file": "netapp_v1_generated_net_app_reverse_replication_direction_async.py", + "description": "Sample for UpdateActiveDirectory", + "file": "netapp_v1_generated_net_app_update_active_directory_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ReverseReplicationDirection_async", + "regionTag": "netapp_v1_generated_NetApp_UpdateActiveDirectory_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, @@ -4337,22 +6784,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_reverse_replication_direction_async.py" + "title": "netapp_v1_generated_net_app_update_active_directory_async.py" }, { "canonical": true, @@ -4361,19 +6808,27 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.reverse_replication_direction", + "fullName": "google.cloud.netapp_v1.NetAppClient.update_active_directory", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.ReverseReplicationDirection", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateActiveDirectory", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "ReverseReplicationDirection" + "shortName": "UpdateActiveDirectory" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.ReverseReplicationDirectionRequest" + 
"type": "google.cloud.netapp_v1.types.UpdateActiveDirectoryRequest" + }, + { + "name": "active_directory", + "type": "google.cloud.netapp_v1.types.ActiveDirectory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4389,21 +6844,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "reverse_replication_direction" + "shortName": "update_active_directory" }, - "description": "Sample for ReverseReplicationDirection", - "file": "netapp_v1_generated_net_app_reverse_replication_direction_sync.py", + "description": "Sample for UpdateActiveDirectory", + "file": "netapp_v1_generated_net_app_update_active_directory_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_ReverseReplicationDirection_sync", + "regionTag": "netapp_v1_generated_NetApp_UpdateActiveDirectory_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, @@ -4413,22 +6868,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_reverse_replication_direction_sync.py" + "title": "netapp_v1_generated_net_app_update_active_directory_sync.py" }, { "canonical": true, @@ -4438,19 +6893,27 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.revert_volume", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.update_backup_policy", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.RevertVolume", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateBackupPolicy", "service": { "fullName": "google.cloud.netapp.v1.NetApp", 
"shortName": "NetApp" }, - "shortName": "RevertVolume" + "shortName": "UpdateBackupPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.RevertVolumeRequest" + "type": "google.cloud.netapp_v1.types.UpdateBackupPolicyRequest" + }, + { + "name": "backup_policy", + "type": "google.cloud.netapp_v1.types.BackupPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4466,21 +6929,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "revert_volume" + "shortName": "update_backup_policy" }, - "description": "Sample for RevertVolume", - "file": "netapp_v1_generated_net_app_revert_volume_async.py", + "description": "Sample for UpdateBackupPolicy", + "file": "netapp_v1_generated_net_app_update_backup_policy_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_RevertVolume_async", + "regionTag": "netapp_v1_generated_NetApp_UpdateBackupPolicy_async", "segments": [ { - "end": 56, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4490,22 +6953,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_revert_volume_async.py" + "title": "netapp_v1_generated_net_app_update_backup_policy_async.py" }, { "canonical": true, @@ -4514,19 +6977,27 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.revert_volume", + "fullName": "google.cloud.netapp_v1.NetAppClient.update_backup_policy", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.RevertVolume", + "fullName": 
"google.cloud.netapp.v1.NetApp.UpdateBackupPolicy", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "RevertVolume" + "shortName": "UpdateBackupPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.RevertVolumeRequest" + "type": "google.cloud.netapp_v1.types.UpdateBackupPolicyRequest" + }, + { + "name": "backup_policy", + "type": "google.cloud.netapp_v1.types.BackupPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4542,21 +7013,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "revert_volume" + "shortName": "update_backup_policy" }, - "description": "Sample for RevertVolume", - "file": "netapp_v1_generated_net_app_revert_volume_sync.py", + "description": "Sample for UpdateBackupPolicy", + "file": "netapp_v1_generated_net_app_update_backup_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_RevertVolume_sync", + "regionTag": "netapp_v1_generated_NetApp_UpdateBackupPolicy_sync", "segments": [ { - "end": 56, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4566,22 +7037,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_revert_volume_sync.py" + "title": "netapp_v1_generated_net_app_update_backup_policy_sync.py" }, { "canonical": true, @@ -4591,19 +7062,27 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.stop_replication", + "fullName": 
"google.cloud.netapp_v1.NetAppAsyncClient.update_backup_vault", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.StopReplication", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateBackupVault", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "StopReplication" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.StopReplicationRequest" + "type": "google.cloud.netapp_v1.types.UpdateBackupVaultRequest" + }, + { + "name": "backup_vault", + "type": "google.cloud.netapp_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4619,21 +7098,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "stop_replication" + "shortName": "update_backup_vault" }, - "description": "Sample for StopReplication", - "file": "netapp_v1_generated_net_app_stop_replication_async.py", + "description": "Sample for UpdateBackupVault", + "file": "netapp_v1_generated_net_app_update_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_StopReplication_async", + "regionTag": "netapp_v1_generated_NetApp_UpdateBackupVault_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4643,22 +7122,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_stop_replication_async.py" + "title": "netapp_v1_generated_net_app_update_backup_vault_async.py" }, { "canonical": true, @@ -4667,19 +7146,27 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", 
"shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.stop_replication", + "fullName": "google.cloud.netapp_v1.NetAppClient.update_backup_vault", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.StopReplication", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateBackupVault", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "StopReplication" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.StopReplicationRequest" + "type": "google.cloud.netapp_v1.types.UpdateBackupVaultRequest" + }, + { + "name": "backup_vault", + "type": "google.cloud.netapp_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4695,21 +7182,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "stop_replication" + "shortName": "update_backup_vault" }, - "description": "Sample for StopReplication", - "file": "netapp_v1_generated_net_app_stop_replication_sync.py", + "description": "Sample for UpdateBackupVault", + "file": "netapp_v1_generated_net_app_update_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_StopReplication_sync", + "regionTag": "netapp_v1_generated_NetApp_UpdateBackupVault_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4719,22 +7206,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_stop_replication_sync.py" + "title": "netapp_v1_generated_net_app_update_backup_vault_sync.py" }, { 
"canonical": true, @@ -4744,23 +7231,23 @@ "fullName": "google.cloud.netapp_v1.NetAppAsyncClient", "shortName": "NetAppAsyncClient" }, - "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.update_active_directory", + "fullName": "google.cloud.netapp_v1.NetAppAsyncClient.update_backup", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.UpdateActiveDirectory", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateBackup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "UpdateActiveDirectory" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.UpdateActiveDirectoryRequest" + "type": "google.cloud.netapp_v1.types.UpdateBackupRequest" }, { - "name": "active_directory", - "type": "google.cloud.netapp_v1.types.ActiveDirectory" + "name": "backup", + "type": "google.cloud.netapp_v1.types.Backup" }, { "name": "update_mask", @@ -4780,21 +7267,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_active_directory" + "shortName": "update_backup" }, - "description": "Sample for UpdateActiveDirectory", - "file": "netapp_v1_generated_net_app_update_active_directory_async.py", + "description": "Sample for UpdateBackup", + "file": "netapp_v1_generated_net_app_update_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_UpdateActiveDirectory_async", + "regionTag": "netapp_v1_generated_NetApp_UpdateBackup_async", "segments": [ { - "end": 62, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4804,22 +7291,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": 
"netapp_v1_generated_net_app_update_active_directory_async.py" + "title": "netapp_v1_generated_net_app_update_backup_async.py" }, { "canonical": true, @@ -4828,23 +7315,23 @@ "fullName": "google.cloud.netapp_v1.NetAppClient", "shortName": "NetAppClient" }, - "fullName": "google.cloud.netapp_v1.NetAppClient.update_active_directory", + "fullName": "google.cloud.netapp_v1.NetAppClient.update_backup", "method": { - "fullName": "google.cloud.netapp.v1.NetApp.UpdateActiveDirectory", + "fullName": "google.cloud.netapp.v1.NetApp.UpdateBackup", "service": { "fullName": "google.cloud.netapp.v1.NetApp", "shortName": "NetApp" }, - "shortName": "UpdateActiveDirectory" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.netapp_v1.types.UpdateActiveDirectoryRequest" + "type": "google.cloud.netapp_v1.types.UpdateBackupRequest" }, { - "name": "active_directory", - "type": "google.cloud.netapp_v1.types.ActiveDirectory" + "name": "backup", + "type": "google.cloud.netapp_v1.types.Backup" }, { "name": "update_mask", @@ -4864,21 +7351,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "update_active_directory" + "shortName": "update_backup" }, - "description": "Sample for UpdateActiveDirectory", - "file": "netapp_v1_generated_net_app_update_active_directory_sync.py", + "description": "Sample for UpdateBackup", + "file": "netapp_v1_generated_net_app_update_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "netapp_v1_generated_NetApp_UpdateActiveDirectory_sync", + "regionTag": "netapp_v1_generated_NetApp_UpdateBackup_sync", "segments": [ { - "end": 62, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4888,22 +7375,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 
63, - "start": 60, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "netapp_v1_generated_net_app_update_active_directory_sync.py" + "title": "netapp_v1_generated_net_app_update_backup_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py b/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py index 469132ffc186..0860cef12e06 100644 --- a/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py +++ b/packages/google-cloud-netapp/scripts/fixup_netapp_v1_keywords.py @@ -40,12 +40,18 @@ class netappCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_active_directory': ('parent', 'active_directory', 'active_directory_id', ), + 'create_backup': ('parent', 'backup_id', 'backup', ), + 'create_backup_policy': ('parent', 'backup_policy', 'backup_policy_id', ), + 'create_backup_vault': ('parent', 'backup_vault_id', 'backup_vault', ), 'create_kms_config': ('parent', 'kms_config_id', 'kms_config', ), 'create_replication': ('parent', 'replication', 'replication_id', ), 'create_snapshot': ('parent', 'snapshot', 'snapshot_id', ), 'create_storage_pool': ('parent', 'storage_pool_id', 'storage_pool', ), 'create_volume': ('parent', 'volume_id', 'volume', ), 'delete_active_directory': ('name', ), + 'delete_backup': ('name', ), + 'delete_backup_policy': ('name', ), + 'delete_backup_vault': ('name', ), 'delete_kms_config': ('name', ), 'delete_replication': ('name', ), 'delete_snapshot': ('name', ), @@ -53,12 +59,18 @@ class netappCallTransformer(cst.CSTTransformer): 'delete_volume': ('name', 'force', ), 'encrypt_volumes': ('name', ), 'get_active_directory': ('name', ), + 'get_backup': ('name', ), + 'get_backup_policy': ('name', ), + 'get_backup_vault': ('name', ), 'get_kms_config': ('name', ), 'get_replication': ('name', ), 'get_snapshot': ('name', ), 'get_storage_pool': ('name', ), 'get_volume': 
('name', ), 'list_active_directories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backup_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), 'list_kms_configs': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), 'list_replications': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), 'list_snapshots': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), @@ -69,6 +81,9 @@ class netappCallTransformer(cst.CSTTransformer): 'revert_volume': ('name', 'snapshot_id', ), 'stop_replication': ('name', 'force', ), 'update_active_directory': ('update_mask', 'active_directory', ), + 'update_backup': ('update_mask', 'backup', ), + 'update_backup_policy': ('update_mask', 'backup_policy', ), + 'update_backup_vault': ('update_mask', 'backup_vault', ), 'update_kms_config': ('update_mask', 'kms_config', ), 'update_replication': ('update_mask', 'replication', ), 'update_snapshot': ('update_mask', 'snapshot', ), diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index 7f229fe6f71c..13b3d196dd7b 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -65,6 +65,12 @@ ) from google.cloud.netapp_v1.types import active_directory as gcn_active_directory from google.cloud.netapp_v1.types import active_directory +from google.cloud.netapp_v1.types import backup +from google.cloud.netapp_v1.types import backup as gcn_backup +from google.cloud.netapp_v1.types import backup_policy +from google.cloud.netapp_v1.types import backup_policy as gcn_backup_policy +from google.cloud.netapp_v1.types import backup_vault +from 
google.cloud.netapp_v1.types import backup_vault as gcn_backup_vault from google.cloud.netapp_v1.types import cloud_netapp_service, common, kms from google.cloud.netapp_v1.types import replication from google.cloud.netapp_v1.types import replication as gcn_replication @@ -10335,6 +10341,4312 @@ async def test_reverse_replication_direction_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup_vault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_vault.CreateBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_vault.CreateBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_create_backup_vault_async( + transport: str = "grpc_asyncio", + request_type=gcn_backup_vault.CreateBackupVaultRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_vault.CreateBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_vault_async_from_dict(): + await test_create_backup_vault_async(request_type=dict) + + +def test_create_backup_vault_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_vault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_vault_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_vault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_vault_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_vault( + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = gcn_backup_vault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +def test_create_backup_vault_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_vault( + gcn_backup_vault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_backup_vault( + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = gcn_backup_vault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_vault( + gcn_backup_vault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_vault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backup_vault.BackupVault( + name="name_value", + state=backup_vault.BackupVault.State.CREATING, + description="description_value", + ) + response = client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.GetBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_vault.BackupVault) + assert response.name == "name_value" + assert response.state == backup_vault.BackupVault.State.CREATING + assert response.description == "description_value" + + +def test_get_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.GetBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_get_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backup_vault.GetBackupVaultRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_vault.BackupVault( + name="name_value", + state=backup_vault.BackupVault.State.CREATING, + description="description_value", + ) + ) + response = await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.GetBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_vault.BackupVault) + assert response.name == "name_value" + assert response.state == backup_vault.BackupVault.State.CREATING + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_from_dict(): + await test_get_backup_vault_async(request_type=dict) + + +def test_get_backup_vault_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_vault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = backup_vault.BackupVault() + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_vault_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_vault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_vault.BackupVault() + ) + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_vault_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backup_vault.BackupVault() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_vault_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backup_vault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backup_vault.BackupVault() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_vault.BackupVault() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_vault( + backup_vault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_vault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_vault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.ListBackupVaultsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.ListBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async( + transport: str = "grpc_asyncio", request_type=backup_vault.ListBackupVaultsRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_vault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.ListBackupVaultsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_from_dict(): + await test_list_backup_vaults_async(request_type=dict) + + +def test_list_backup_vaults_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backup_vault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = backup_vault.ListBackupVaultsResponse() + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_vault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_vault.ListBackupVaultsResponse() + ) + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_vaults_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_vault.ListBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_vaults_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backup_vault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_vault.ListBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_vault.ListBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_vaults( + backup_vault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_pager(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + next_page_token="abc", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + ], + next_page_token="ghi", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_vaults(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup_vault.BackupVault) for i in results) + + +def test_list_backup_vaults_pages(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials, + 
transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + next_page_token="abc", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + ], + next_page_token="ghi", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pager(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + next_page_token="abc", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + ], + next_page_token="ghi", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup_vault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pages(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + next_page_token="abc", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + ], + next_page_token="ghi", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup_vault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_vault.UpdateBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_vault.UpdateBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_update_backup_vault_async( + transport: str = "grpc_asyncio", + request_type=gcn_backup_vault.UpdateBackupVaultRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_vault.UpdateBackupVaultRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_from_dict(): + await test_update_backup_vault_async(request_type=dict) + + +def test_update_backup_vault_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_vault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_vault_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_vault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_vault_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_vault( + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = gcn_backup_vault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_vault_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup_vault( + gcn_backup_vault.UpdateBackupVaultRequest(), + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup_vault( + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = gcn_backup_vault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_backup_vault( + gcn_backup_vault.UpdateBackupVaultRequest(), + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_vault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.DeleteBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.DeleteBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backup_vault.DeleteBackupVaultRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup_vault.DeleteBackupVaultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_from_dict(): + await test_delete_backup_vault_async(request_type=dict) + + +def test_delete_backup_vault_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_vault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_vault_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_vault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_vault_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_vault_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backup_vault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_vault( + backup_vault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup.CreateBackupRequest, + dict, + ], +) +def test_create_backup(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup.CreateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup.CreateBackupRequest() + + +@pytest.mark.asyncio +async def test_create_backup_async( + transport: str = "grpc_asyncio", request_type=gcn_backup.CreateBackupRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup.CreateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) + + +def test_create_backup_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup.CreateBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup.CreateBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup( + parent="parent_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup + mock_val = gcn_backup.Backup(name="name_value") + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + + +def test_create_backup_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup( + gcn_backup.CreateBackupRequest(), + parent="parent_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_backup( + parent="parent_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup + mock_val = gcn_backup.Backup(name="name_value") + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup( + gcn_backup.CreateBackupRequest(), + parent="parent_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backup.Backup( + name="name_value", + state=backup.Backup.State.CREATING, + description="description_value", + volume_usage_bytes=1938, + backup_type=backup.Backup.Type.MANUAL, + source_volume="source_volume_value", + source_snapshot="source_snapshot_value", + chain_storage_bytes=2013, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.name == "name_value" + assert response.state == backup.Backup.State.CREATING + assert response.description == "description_value" + assert response.volume_usage_bytes == 1938 + assert response.backup_type == backup.Backup.Type.MANUAL + assert response.source_volume == "source_volume_value" + assert response.source_snapshot == "source_snapshot_value" + assert response.chain_storage_bytes == 2013 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=backup.GetBackupRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup.Backup( + name="name_value", + state=backup.Backup.State.CREATING, + description="description_value", + volume_usage_bytes=1938, + backup_type=backup.Backup.Type.MANUAL, + source_volume="source_volume_value", + source_snapshot="source_snapshot_value", + chain_storage_bytes=2013, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backup.Backup) + assert response.name == "name_value" + assert response.state == backup.Backup.State.CREATING + assert response.description == "description_value" + assert response.volume_usage_bytes == 1938 + assert response.backup_type == backup.Backup.Type.MANUAL + assert response.source_volume == "source_volume_value" + assert response.source_snapshot == "source_snapshot_value" + assert response.chain_storage_bytes == 2013 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backup.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + backup.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backup.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + backup.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=backup.ListBackupsRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = backup.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backup.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backup.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backups( + backup.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token="abc", + ), + backup.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token="ghi", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup.Backup) for i in results) + + +def test_list_backups_pages(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token="abc", + ), + backup.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token="ghi", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token="abc", + ), + backup.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token="ghi", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token="abc", + ), + backup.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token="ghi", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backup.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=backup.DeleteBackupRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backup.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup( + backup.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup.UpdateBackupRequest, + dict, + ], +) +def test_update_backup(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup.UpdateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=gcn_backup.UpdateBackupRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup.UpdateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcn_backup.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = gcn_backup.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + gcn_backup.UpdateBackupRequest(), + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup( + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = gcn_backup.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup( + gcn_backup.UpdateBackupRequest(), + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup_policy.CreateBackupPolicyRequest, + dict, + ], +) +def test_create_backup_policy(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_policy.CreateBackupPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + client.create_backup_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_policy.CreateBackupPolicyRequest() + + +@pytest.mark.asyncio +async def test_create_backup_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_backup_policy.CreateBackupPolicyRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_policy.CreateBackupPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_policy_async_from_dict(): + await test_create_backup_policy_async(request_type=dict) + + +def test_create_backup_policy_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_policy.CreateBackupPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_policy_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_policy.CreateBackupPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_policy_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_policy( + parent="parent_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_policy + mock_val = gcn_backup_policy.BackupPolicy(name="name_value") + assert arg == mock_val + arg = args[0].backup_policy_id + mock_val = "backup_policy_id_value" + assert arg == mock_val + + +def test_create_backup_policy_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_policy( + gcn_backup_policy.CreateBackupPolicyRequest(), + parent="parent_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_policy_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_policy( + parent="parent_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_policy + mock_val = gcn_backup_policy.BackupPolicy(name="name_value") + assert arg == mock_val + arg = args[0].backup_policy_id + mock_val = "backup_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_policy_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_backup_policy( + gcn_backup_policy.CreateBackupPolicyRequest(), + parent="parent_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_policy.GetBackupPolicyRequest, + dict, + ], +) +def test_get_backup_policy(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_policy.BackupPolicy( + name="name_value", + daily_backup_limit=1894, + weekly_backup_limit=2020, + monthly_backup_limit=2142, + description="description_value", + enabled=True, + assigned_volume_count=2253, + state=backup_policy.BackupPolicy.State.CREATING, + ) + response = client.get_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.GetBackupPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backup_policy.BackupPolicy) + assert response.name == "name_value" + assert response.daily_backup_limit == 1894 + assert response.weekly_backup_limit == 2020 + assert response.monthly_backup_limit == 2142 + assert response.description == "description_value" + assert response.enabled is True + assert response.assigned_volume_count == 2253 + assert response.state == backup_policy.BackupPolicy.State.CREATING + + +def test_get_backup_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + client.get_backup_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.GetBackupPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_backup_policy_async( + transport: str = "grpc_asyncio", request_type=backup_policy.GetBackupPolicyRequest +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_policy.BackupPolicy( + name="name_value", + daily_backup_limit=1894, + weekly_backup_limit=2020, + monthly_backup_limit=2142, + description="description_value", + enabled=True, + assigned_volume_count=2253, + state=backup_policy.BackupPolicy.State.CREATING, + ) + ) + response = await client.get_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.GetBackupPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_policy.BackupPolicy) + assert response.name == "name_value" + assert response.daily_backup_limit == 1894 + assert response.weekly_backup_limit == 2020 + assert response.monthly_backup_limit == 2142 + assert response.description == "description_value" + assert response.enabled is True + assert response.assigned_volume_count == 2253 + assert response.state == backup_policy.BackupPolicy.State.CREATING + + +@pytest.mark.asyncio +async def test_get_backup_policy_async_from_dict(): + await test_get_backup_policy_async(request_type=dict) + + +def test_get_backup_policy_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_policy.GetBackupPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + call.return_value = backup_policy.BackupPolicy() + client.get_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_policy_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_policy.GetBackupPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_policy.BackupPolicy() + ) + await client.get_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_policy_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_policy.BackupPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_policy_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_policy( + backup_policy.GetBackupPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_policy_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_policy.BackupPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_policy.BackupPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_policy_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_policy( + backup_policy.GetBackupPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_policy.ListBackupPoliciesRequest, + dict, + ], +) +def test_list_backup_policies(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_policy.ListBackupPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.ListBackupPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + client.list_backup_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.ListBackupPoliciesRequest() + + +@pytest.mark.asyncio +async def test_list_backup_policies_async( + transport: str = "grpc_asyncio", + request_type=backup_policy.ListBackupPoliciesRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_policy.ListBackupPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.ListBackupPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_policies_async_from_dict(): + await test_list_backup_policies_async(request_type=dict) + + +def test_list_backup_policies_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = backup_policy.ListBackupPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + call.return_value = backup_policy.ListBackupPoliciesResponse() + client.list_backup_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_policies_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_policy.ListBackupPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_policy.ListBackupPoliciesResponse() + ) + await client.list_backup_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_policies_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_policy.ListBackupPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_policies_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_policies( + backup_policy.ListBackupPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_policies_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_policy.ListBackupPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_policy.ListBackupPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_policies_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_policies( + backup_policy.ListBackupPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_backup_policies_pager(transport_name: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + next_page_token="abc", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[], + next_page_token="def", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + ], + next_page_token="ghi", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_policies(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup_policy.BackupPolicy) for i in results) + + +def test_list_backup_policies_pages(transport_name: str = "grpc"): + client = NetAppClient( + 
credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + next_page_token="abc", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[], + next_page_token="def", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + ], + next_page_token="ghi", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_policies_async_pager(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + next_page_token="abc", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[], + next_page_token="def", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + ], + next_page_token="ghi", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup_policy.BackupPolicy) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_policies_async_pages(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + next_page_token="abc", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[], + next_page_token="def", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + ], + next_page_token="ghi", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup_policy.UpdateBackupPolicyRequest, + dict, + ], +) +def test_update_backup_policy(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_policy.UpdateBackupPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + client.update_backup_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_policy.UpdateBackupPolicyRequest() + + +@pytest.mark.asyncio +async def test_update_backup_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_backup_policy.UpdateBackupPolicyRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_backup_policy.UpdateBackupPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_policy_async_from_dict(): + await test_update_backup_policy_async(request_type=dict) + + +def test_update_backup_policy_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_policy.UpdateBackupPolicyRequest() + + request.backup_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_policy_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_backup_policy.UpdateBackupPolicyRequest() + + request.backup_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_policy_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_policy( + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_policy + mock_val = gcn_backup_policy.BackupPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_policy_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup_policy( + gcn_backup_policy.UpdateBackupPolicyRequest(), + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_policy_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup_policy( + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_policy + mock_val = gcn_backup_policy.BackupPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_policy_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_backup_policy( + gcn_backup_policy.UpdateBackupPolicyRequest(), + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_policy.DeleteBackupPolicyRequest, + dict, + ], +) +def test_delete_backup_policy(request_type, transport: str = "grpc"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.DeleteBackupPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + client.delete_backup_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.DeleteBackupPolicyRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_policy_async( + transport: str = "grpc_asyncio", + request_type=backup_policy.DeleteBackupPolicyRequest, +): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup_policy.DeleteBackupPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_policy_async_from_dict(): + await test_delete_backup_policy_async(request_type=dict) + + +def test_delete_backup_policy_field_headers(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_policy.DeleteBackupPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_policy_field_headers_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_policy.DeleteBackupPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_policy_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_policy_flattened_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_policy( + backup_policy.DeleteBackupPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_policy_flattened_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_policy_flattened_error_async(): + client = NetAppAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_policy( + backup_policy.DeleteBackupPolicyRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -10342,48 +14654,5016 @@ async def test_reverse_replication_direction_field_headers_async(): dict, ], ) -def test_list_storage_pools_rest(request_type): +def test_list_storage_pools_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storage_pool.ListStoragePoolsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_storage_pools(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStoragePoolsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_storage_pools_rest_required_fields( + request_type=storage_pool.ListStoragePoolsRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_storage_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_storage_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_pool.ListStoragePoolsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_storage_pools(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_storage_pools_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_storage_pools._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_storage_pools_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_list_storage_pools" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_list_storage_pools" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storage_pool.ListStoragePoolsRequest.pb( + storage_pool.ListStoragePoolsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = storage_pool.ListStoragePoolsResponse.to_json( + storage_pool.ListStoragePoolsResponse() + ) + + request = storage_pool.ListStoragePoolsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = storage_pool.ListStoragePoolsResponse() + + client.list_storage_pools( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_storage_pools_rest_bad_request( + transport: str = "rest", request_type=storage_pool.ListStoragePoolsRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_storage_pools(request) + + +def test_list_storage_pools_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storage_pool.ListStoragePoolsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_storage_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_list_storage_pools_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_storage_pools( + storage_pool.ListStoragePoolsRequest(), + parent="parent_value", + ) + + +def test_list_storage_pools_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + storage_pool.ListStoragePoolsResponse( + storage_pools=[ + storage_pool.StoragePool(), + storage_pool.StoragePool(), + storage_pool.StoragePool(), + ], + next_page_token="abc", + ), + storage_pool.ListStoragePoolsResponse( + storage_pools=[], + next_page_token="def", + ), + storage_pool.ListStoragePoolsResponse( + storage_pools=[ + storage_pool.StoragePool(), + ], + next_page_token="ghi", + ), + storage_pool.ListStoragePoolsResponse( + storage_pools=[ + storage_pool.StoragePool(), + storage_pool.StoragePool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + storage_pool.ListStoragePoolsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_storage_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, storage_pool.StoragePool) for i in results) + + pages = list(client.list_storage_pools(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_storage_pool.CreateStoragePoolRequest, + dict, + ], +) +def test_create_storage_pool_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["storage_pool"] = { + "name": "name_value", + "service_level": 1, + "capacity_gib": 1247, + "volume_capacity_gib": 2006, + "volume_count": 1312, + "state": 1, + "state_details": "state_details_value", + "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", + "labels": {}, + "network": "network_value", + "active_directory": "active_directory_value", + "kms_config": "kms_config_value", + "ldap_enabled": True, + "psa_range": "psa_range_value", + "encryption_type": 1, + "global_access_allowed": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_storage_pool.CreateStoragePoolRequest.meta.fields["storage_pool"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["storage_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["storage_pool"][field])): + del request_init["storage_pool"][field][i][subfield] + else: + del request_init["storage_pool"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_storage_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_storage_pool_rest_required_fields( + request_type=gcn_storage_pool.CreateStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["storage_pool_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "storagePoolId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "storagePoolId" in jsonified_request + assert jsonified_request["storagePoolId"] == request_init["storage_pool_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["storagePoolId"] = "storage_pool_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_storage_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("storage_pool_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "storagePoolId" in jsonified_request + assert jsonified_request["storagePoolId"] == "storage_pool_id_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_storage_pool(request) + + expected_params = [ + ( + "storagePoolId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("storagePoolId",)) + & set( + ( + "parent", + "storagePoolId", + "storagePool", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_storage_pool_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_storage_pool" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_create_storage_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcn_storage_pool.CreateStoragePoolRequest.pb( + gcn_storage_pool.CreateStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcn_storage_pool.CreateStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_storage_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_storage_pool_rest_bad_request( + transport: str = "rest", request_type=gcn_storage_pool.CreateStoragePoolRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_storage_pool(request) + + +def test_create_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + storage_pool_id="storage_pool_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_create_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_storage_pool( + gcn_storage_pool.CreateStoragePoolRequest(), + parent="parent_value", + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + storage_pool_id="storage_pool_id_value", + ) + + +def test_create_storage_pool_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storage_pool.GetStoragePoolRequest, + dict, + ], +) +def test_get_storage_pool_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = storage_pool.StoragePool( + name="name_value", + service_level=common.ServiceLevel.PREMIUM, + capacity_gib=1247, + volume_capacity_gib=2006, + volume_count=1312, + state=storage_pool.StoragePool.State.READY, + state_details="state_details_value", + description="description_value", + network="network_value", + active_directory="active_directory_value", + kms_config="kms_config_value", + ldap_enabled=True, + psa_range="psa_range_value", + encryption_type=common.EncryptionType.SERVICE_MANAGED, + global_access_allowed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_pool.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_storage_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, storage_pool.StoragePool) + assert response.name == "name_value" + assert response.service_level == common.ServiceLevel.PREMIUM + assert response.capacity_gib == 1247 + assert response.volume_capacity_gib == 2006 + assert response.volume_count == 1312 + assert response.state == storage_pool.StoragePool.State.READY + assert response.state_details == "state_details_value" + assert response.description == "description_value" + assert response.network == "network_value" + assert response.active_directory == "active_directory_value" + assert response.kms_config == "kms_config_value" + assert response.ldap_enabled is True + assert response.psa_range == "psa_range_value" + assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED + assert response.global_access_allowed is True + + +def test_get_storage_pool_rest_required_fields( + request_type=storage_pool.GetStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = 
NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = storage_pool.StoragePool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = storage_pool.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_storage_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_storage_pool_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if 
null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_get_storage_pool" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_get_storage_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storage_pool.GetStoragePoolRequest.pb( + storage_pool.GetStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = storage_pool.StoragePool.to_json( + storage_pool.StoragePool() + ) + + request = storage_pool.GetStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = storage_pool.StoragePool() + + client.get_storage_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_storage_pool_rest_bad_request( + transport: str = "rest", request_type=storage_pool.GetStoragePoolRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_storage_pool(request) + + +def test_get_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = storage_pool.StoragePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = storage_pool.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/storagePools/*}" + % client.transport._host, + args[1], + ) + + +def test_get_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_storage_pool( + storage_pool.GetStoragePoolRequest(), + name="name_value", + ) + + +def test_get_storage_pool_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_storage_pool.UpdateStoragePoolRequest, + dict, + ], +) +def test_update_storage_pool_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "storage_pool": { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + } + request_init["storage_pool"] = { + "name": "projects/sample1/locations/sample2/storagePools/sample3", + "service_level": 1, + "capacity_gib": 1247, + "volume_capacity_gib": 2006, + "volume_count": 1312, + "state": 1, + "state_details": "state_details_value", + "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", + "labels": {}, + "network": "network_value", + "active_directory": "active_directory_value", + "kms_config": "kms_config_value", + "ldap_enabled": True, + "psa_range": "psa_range_value", + "encryption_type": 1, + "global_access_allowed": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_storage_pool.UpdateStoragePoolRequest.meta.fields["storage_pool"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["storage_pool"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["storage_pool"][field])): + del request_init["storage_pool"][field][i][subfield] + else: + del 
request_init["storage_pool"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_storage_pool(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_storage_pool_rest_required_fields( + request_type=gcn_storage_pool.UpdateStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_storage_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_storage_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "storagePool", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, 
False]) +def test_update_storage_pool_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_update_storage_pool" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_update_storage_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcn_storage_pool.UpdateStoragePoolRequest.pb( + gcn_storage_pool.UpdateStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcn_storage_pool.UpdateStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_storage_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_storage_pool_rest_bad_request( + transport: str = "rest", request_type=gcn_storage_pool.UpdateStoragePoolRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "storage_pool": { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + } + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_storage_pool(request) + + +def test_update_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "storage_pool": { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{storage_pool.name=projects/*/locations/*/storagePools/*}" + % client.transport._host, + args[1], + ) + + +def test_update_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_storage_pool( + gcn_storage_pool.UpdateStoragePoolRequest(), + storage_pool=gcn_storage_pool.StoragePool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_storage_pool_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + storage_pool.DeleteStoragePoolRequest, + dict, + ], +) +def test_delete_storage_pool_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_storage_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_storage_pool_rest_required_fields( + request_type=storage_pool.DeleteStoragePoolRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_storage_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_storage_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_storage_pool_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_storage_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_storage_pool_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_storage_pool" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_delete_storage_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = storage_pool.DeleteStoragePoolRequest.pb( + storage_pool.DeleteStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = storage_pool.DeleteStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_storage_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_storage_pool_rest_bad_request( + transport: str = "rest", request_type=storage_pool.DeleteStoragePoolRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_storage_pool(request) + + +def test_delete_storage_pool_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/storagePools/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_storage_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/storagePools/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_storage_pool_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_storage_pool( + storage_pool.DeleteStoragePoolRequest(), + name="name_value", + ) + + +def test_delete_storage_pool_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume.ListVolumesRequest, + dict, + ], +) +def test_list_volumes_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = volume.ListVolumesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_volumes(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVolumesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_volumes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_volumes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = volume.ListVolumesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_volumes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_volumes_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_volumes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_volumes_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_list_volumes" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_list_volumes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.ListVolumesRequest.pb(volume.ListVolumesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = volume.ListVolumesResponse.to_json( + volume.ListVolumesResponse() + ) + + request = volume.ListVolumesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = volume.ListVolumesResponse() + + client.list_volumes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_volumes_rest_bad_request( + transport: str = "rest", request_type=volume.ListVolumesRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_volumes(request) + + +def test_list_volumes_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = volume.ListVolumesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_volumes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, + args[1], + ) + + +def test_list_volumes_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_volumes( + volume.ListVolumesRequest(), + parent="parent_value", + ) + + +def test_list_volumes_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_volumes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, volume.Volume) for i in results) + + pages = list(client.list_volumes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + volume.GetVolumeRequest, + dict, + ], +) +def test_get_volume_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = volume.Volume( + name="name_value", + state=volume.Volume.State.READY, + state_details="state_details_value", + share_name="share_name_value", + psa_range="psa_range_value", + storage_pool="storage_pool_value", + network="network_value", + service_level=common.ServiceLevel.PREMIUM, + capacity_gib=1247, + protocols=[volume.Protocols.NFSV3], + smb_settings=[volume.SMBSettings.ENCRYPT_DATA], + unix_permissions="unix_permissions_value", + description="description_value", + snap_reserve=0.1293, + snapshot_directory=True, + used_gib=834, + security_style=volume.SecurityStyle.NTFS, + kerberos_enabled=True, + ldap_enabled=True, + active_directory="active_directory_value", + kms_config="kms_config_value", + encryption_type=common.EncryptionType.SERVICE_MANAGED, + has_replication=True, + restricted_actions=[volume.RestrictedAction.DELETE], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_volume(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, volume.Volume) + assert response.name == "name_value" + assert response.state == volume.Volume.State.READY + assert response.state_details == "state_details_value" + assert response.share_name == "share_name_value" + assert response.psa_range == "psa_range_value" + assert response.storage_pool == "storage_pool_value" + assert response.network == "network_value" + assert response.service_level == common.ServiceLevel.PREMIUM + assert response.capacity_gib == 1247 + assert response.protocols == [volume.Protocols.NFSV3] + assert response.smb_settings == [volume.SMBSettings.ENCRYPT_DATA] + assert response.unix_permissions == "unix_permissions_value" + assert response.description == "description_value" + assert math.isclose(response.snap_reserve, 0.1293, rel_tol=1e-6) + assert response.snapshot_directory is True + assert response.used_gib == 834 + assert response.security_style == volume.SecurityStyle.NTFS + assert response.kerberos_enabled is True + assert response.ldap_enabled is True + assert response.active_directory == "active_directory_value" + assert response.kms_config == "kms_config_value" + assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED + assert response.has_replication is True + assert response.restricted_actions == [volume.RestrictedAction.DELETE] + + +def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + 
# verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = volume.Volume() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_volume_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_volume_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_get_volume" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_get_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.GetVolumeRequest.pb(volume.GetVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request 
= PreparedRequest() + req.return_value._content = volume.Volume.to_json(volume.Volume()) + + request = volume.GetVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = volume.Volume() + + client.get_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_volume_rest_bad_request( + transport: str = "rest", request_type=volume.GetVolumeRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_volume(request) + + +def test_get_volume_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = volume.Volume() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + args[1], + ) + + +def test_get_volume_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_volume( + volume.GetVolumeRequest(), + name="name_value", + ) + + +def test_get_volume_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_volume.CreateVolumeRequest, + dict, + ], +) +def test_create_volume_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["volume"] = { + "name": "name_value", + "state": 1, + "state_details": "state_details_value", + "create_time": {"seconds": 751, "nanos": 543}, + "share_name": "share_name_value", + "psa_range": "psa_range_value", + "storage_pool": "storage_pool_value", + "network": "network_value", + "service_level": 1, + "capacity_gib": 1247, + "export_policy": { + "rules": [ + { + "allowed_clients": "allowed_clients_value", + "has_root_access": "has_root_access_value", + "access_type": 1, + "nfsv3": True, + "nfsv4": True, + "kerberos_5_read_only": True, + "kerberos_5_read_write": True, + "kerberos_5i_read_only": True, + "kerberos_5i_read_write": True, + "kerberos_5p_read_only": True, + "kerberos_5p_read_write": True, + } + ] + }, + "protocols": [1], + "smb_settings": [1], + "mount_options": [ + { + "export": "export_value", + "export_full": "export_full_value", + "protocol": 1, + "instructions": "instructions_value", + } + ], + "unix_permissions": "unix_permissions_value", + "labels": {}, + "description": "description_value", + "snapshot_policy": { + "enabled": True, + "hourly_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + }, + "daily_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + "hour": 0.446, + }, + "weekly_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + "hour": 0.446, + 
"day": "day_value", + }, + "monthly_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + "hour": 0.446, + "days_of_month": "days_of_month_value", + }, + }, + "snap_reserve": 0.1293, + "snapshot_directory": True, + "used_gib": 834, + "security_style": 1, + "kerberos_enabled": True, + "ldap_enabled": True, + "active_directory": "active_directory_value", + "restore_parameters": { + "source_snapshot": "source_snapshot_value", + "source_backup": "source_backup_value", + }, + "kms_config": "kms_config_value", + "encryption_type": 1, + "has_replication": True, + "backup_config": { + "backup_policies": ["backup_policies_value1", "backup_policies_value2"], + "backup_vault": "backup_vault_value", + "scheduled_backup_enabled": True, + }, + "restricted_actions": [1], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_volume.CreateVolumeRequest.meta.fields["volume"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["volume"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["volume"][field])): + del request_init["volume"][field][i][subfield] + else: + del 
request_init["volume"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_volume(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_volume_rest_required_fields( + request_type=gcn_volume.CreateVolumeRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["volume_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "volumeId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "volumeId" in jsonified_request + assert jsonified_request["volumeId"] == request_init["volume_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["volumeId"] = "volume_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_volume._get_unset_required_fields(jsonified_request) + # Check that 
path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("volume_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "volumeId" in jsonified_request + assert jsonified_request["volumeId"] == "volume_id_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_volume(request) + + expected_params = [ + ( + "volumeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_volume_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_volume._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("volumeId",)) + & set( + ( + "parent", + "volumeId", + "volume", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_volume_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_volume" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_create_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcn_volume.CreateVolumeRequest.pb(gcn_volume.CreateVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcn_volume.CreateVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_volume_rest_bad_request( + transport: str = "rest", request_type=gcn_volume.CreateVolumeRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_volume(request) + + +def test_create_volume_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + volume=gcn_volume.Volume(name="name_value"), + volume_id="volume_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, + args[1], + ) + + +def test_create_volume_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_volume( + gcn_volume.CreateVolumeRequest(), + parent="parent_value", + volume=gcn_volume.Volume(name="name_value"), + volume_id="volume_id_value", + ) + + +def test_create_volume_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_volume.UpdateVolumeRequest, + dict, + ], +) +def test_update_volume_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + } + request_init["volume"] = { + "name": "projects/sample1/locations/sample2/volumes/sample3", + "state": 1, + "state_details": "state_details_value", + "create_time": {"seconds": 751, "nanos": 543}, + "share_name": "share_name_value", + "psa_range": "psa_range_value", + "storage_pool": "storage_pool_value", + "network": "network_value", + "service_level": 1, + "capacity_gib": 1247, + "export_policy": { + "rules": [ + { + "allowed_clients": "allowed_clients_value", + "has_root_access": "has_root_access_value", + "access_type": 1, + "nfsv3": True, + "nfsv4": True, + "kerberos_5_read_only": True, + "kerberos_5_read_write": True, + "kerberos_5i_read_only": True, + "kerberos_5i_read_write": True, + "kerberos_5p_read_only": True, + "kerberos_5p_read_write": True, + } + ] + }, + "protocols": [1], + "smb_settings": [1], + "mount_options": [ + { + "export": "export_value", + "export_full": "export_full_value", + "protocol": 1, + "instructions": "instructions_value", + } + ], + "unix_permissions": "unix_permissions_value", + "labels": {}, + "description": "description_value", + "snapshot_policy": { + "enabled": True, + "hourly_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + }, + "daily_schedule": { + "snapshots_to_keep": 
0.18330000000000002, + "minute": 0.658, + "hour": 0.446, + }, + "weekly_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + "hour": 0.446, + "day": "day_value", + }, + "monthly_schedule": { + "snapshots_to_keep": 0.18330000000000002, + "minute": 0.658, + "hour": 0.446, + "days_of_month": "days_of_month_value", + }, + }, + "snap_reserve": 0.1293, + "snapshot_directory": True, + "used_gib": 834, + "security_style": 1, + "kerberos_enabled": True, + "ldap_enabled": True, + "active_directory": "active_directory_value", + "restore_parameters": { + "source_snapshot": "source_snapshot_value", + "source_backup": "source_backup_value", + }, + "kms_config": "kms_config_value", + "encryption_type": 1, + "has_replication": True, + "backup_config": { + "backup_policies": ["backup_policies_value1", "backup_policies_value2"], + "backup_vault": "backup_vault_value", + "scheduled_backup_enabled": True, + }, + "restricted_actions": [1], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_volume.UpdateVolumeRequest.meta.fields["volume"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["volume"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["volume"][field])): + del request_init["volume"][field][i][subfield] + else: + del 
request_init["volume"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_volume(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_volume_rest_required_fields( + request_type=gcn_volume.UpdateVolumeRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_volume._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_volume_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_volume._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "volume", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_update_volume_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_update_volume" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_update_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcn_volume.UpdateVolumeRequest.pb(gcn_volume.UpdateVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcn_volume.UpdateVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_volume_rest_bad_request( + transport: str = "rest", request_type=gcn_volume.UpdateVolumeRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_volume(request) + + +def test_update_volume_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + volume=gcn_volume.Volume(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{volume.name=projects/*/locations/*/volumes/*}" + % client.transport._host, + args[1], + ) + + +def test_update_volume_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_volume( + gcn_volume.UpdateVolumeRequest(), + volume=gcn_volume.Volume(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_volume_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume.DeleteVolumeRequest, + dict, + ], +) +def test_delete_volume_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_volume(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequest): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_volume._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_volume_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_volume_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_volume" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_delete_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.DeleteVolumeRequest.pb(volume.DeleteVolumeRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = volume.DeleteVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_volume_rest_bad_request( + transport: str = "rest", request_type=volume.DeleteVolumeRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_volume(request) + + +def test_delete_volume_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + args[1], + ) + + +def test_delete_volume_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_volume( + volume.DeleteVolumeRequest(), + name="name_value", + ) + + +def test_delete_volume_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume.RevertVolumeRequest, + dict, + ], +) +def test_revert_volume_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.revert_volume(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequest): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request_init["snapshot_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).revert_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["snapshotId"] = "snapshot_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).revert_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "snapshotId" in jsonified_request + assert jsonified_request["snapshotId"] == "snapshot_id_value" + + client = NetAppClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.revert_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_revert_volume_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.revert_volume._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "snapshotId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_revert_volume_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_revert_volume" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_revert_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.RevertVolumeRequest.pb(volume.RevertVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = volume.RevertVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.revert_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_revert_volume_rest_bad_request( + transport: str = "rest", request_type=volume.RevertVolumeRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.revert_volume(request) + + +def test_revert_volume_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + snapshot.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshot.ListSnapshotsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = snapshot.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_snapshots_rest_required_fields( + request_type=snapshot.ListSnapshotsRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_snapshots._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_snapshots._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = snapshot.ListSnapshotsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = snapshot.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_snapshots(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_snapshots_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_snapshots._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_snapshots_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_list_snapshots" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_list_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshot.ListSnapshotsRequest.pb(snapshot.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshot.ListSnapshotsResponse.to_json( + snapshot.ListSnapshotsResponse() + ) + + request = snapshot.ListSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshot.ListSnapshotsResponse() + + client.list_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_snapshots_rest_bad_request( + transport: str = "rest", request_type=snapshot.ListSnapshotsRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_snapshots(request) + + +def test_list_snapshots_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshot.ListSnapshotsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = snapshot.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_snapshots(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" + % client.transport._host, + args[1], + ) + + +def test_list_snapshots_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_snapshots( + snapshot.ListSnapshotsRequest(), + parent="parent_value", + ) + + +def test_list_snapshots_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + snapshot.ListSnapshotsResponse( + snapshots=[ + snapshot.Snapshot(), + snapshot.Snapshot(), + snapshot.Snapshot(), + ], + next_page_token="abc", + ), + snapshot.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + snapshot.ListSnapshotsResponse( + snapshots=[ + snapshot.Snapshot(), + ], + next_page_token="ghi", + ), + snapshot.ListSnapshotsResponse( + snapshots=[ + snapshot.Snapshot(), + snapshot.Snapshot(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(snapshot.ListSnapshotsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = 
{ + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + pager = client.list_snapshots(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, snapshot.Snapshot) for i in results) + + pages = list(client.list_snapshots(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + snapshot.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshot.Snapshot( + name="name_value", + state=snapshot.Snapshot.State.READY, + state_details="state_details_value", + description="description_value", + used_bytes=0.10790000000000001, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = snapshot.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, snapshot.Snapshot) + assert response.name == "name_value" + assert response.state == snapshot.Snapshot.State.READY + assert response.state_details == "state_details_value" + assert response.description == "description_value" + assert math.isclose(response.used_bytes, 0.10790000000000001, rel_tol=1e-6) + + +def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequest): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = snapshot.Snapshot() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = snapshot.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_snapshot_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_snapshot_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetAppRestInterceptor, "post_get_snapshot" + ) as post, 
mock.patch.object( + transports.NetAppRestInterceptor, "pre_get_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshot.GetSnapshotRequest.pb(snapshot.GetSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshot.Snapshot.to_json(snapshot.Snapshot()) + + request = snapshot.GetSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshot.Snapshot() + + client.get_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_snapshot_rest_bad_request( + transport: str = "rest", request_type=snapshot.GetSnapshotRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_snapshot(request) + + +def test_get_snapshot_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshot.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = snapshot.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + % client.transport._host, + args[1], + ) + + +def test_get_snapshot_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_snapshot( + snapshot.GetSnapshotRequest(), + name="name_value", + ) + + +def test_get_snapshot_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_snapshot.CreateSnapshotRequest, + dict, + ], +) +def test_create_snapshot_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init["snapshot"] = { + "name": "name_value", + "state": 1, + "state_details": "state_details_value", + "description": "description_value", + "used_bytes": 0.10790000000000001, + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_snapshot.CreateSnapshotRequest.meta.fields["snapshot"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot"][field])): + del request_init["snapshot"][field][i][subfield] + else: + del 
request_init["snapshot"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_snapshot(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_snapshot_rest_required_fields( + request_type=gcn_snapshot.CreateSnapshotRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["snapshot_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "snapshotId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "snapshotId" in jsonified_request + assert jsonified_request["snapshotId"] == request_init["snapshot_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["snapshotId"] = "snapshot_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_snapshot._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("snapshot_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "snapshotId" in jsonified_request + assert jsonified_request["snapshotId"] == "snapshot_id_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_snapshot(request) + + expected_params = [ + ( + "snapshotId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_snapshot_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("snapshotId",)) + & set( + ( + "parent", + "snapshot", + "snapshotId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_snapshot_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_snapshot" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_create_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcn_snapshot.CreateSnapshotRequest.pb( + gcn_snapshot.CreateSnapshotRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcn_snapshot.CreateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_snapshot_rest_bad_request( + transport: str = "rest", request_type=gcn_snapshot.CreateSnapshotRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot(request) + + +def test_create_snapshot_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + snapshot=gcn_snapshot.Snapshot(name="name_value"), + snapshot_id="snapshot_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" + % client.transport._host, + args[1], + ) + + +def test_create_snapshot_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_snapshot( + gcn_snapshot.CreateSnapshotRequest(), + parent="parent_value", + snapshot=gcn_snapshot.Snapshot(name="name_value"), + snapshot_id="snapshot_id_value", + ) + + +def test_create_snapshot_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + snapshot.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_snapshot_rest_required_fields( + request_type=snapshot.DeleteSnapshotRequest, +): + transport_class = transports.NetAppRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_snapshot_rest_unset_required_fields(): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_snapshot_rest_interceptors(null_interceptor): + transport = transports.NetAppRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), + ) + client = NetAppClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_snapshot" + ) as post, mock.patch.object( + transports.NetAppRestInterceptor, "pre_delete_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshot.DeleteSnapshotRequest.pb(snapshot.DeleteSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = snapshot.DeleteSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_snapshot_rest_bad_request( + transport: str = "rest", request_type=snapshot.DeleteSnapshotRequest +): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_snapshot(request) + + +def test_delete_snapshot_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_snapshot( + snapshot.DeleteSnapshotRequest(), + name="name_value", + ) + + +def test_delete_snapshot_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_snapshot.UpdateSnapshotRequest, + dict, + ], +) +def test_update_snapshot_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # send a request that will satisfy transcoding + request_init = { + "snapshot": { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + } + request_init["snapshot"] = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4", + "state": 1, + "state_details": "state_details_value", + "description": "description_value", + "used_bytes": 0.10790000000000001, + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_snapshot.UpdateSnapshotRequest.meta.fields["snapshot"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot"][field])): + del request_init["snapshot"][field][i][subfield] + else: + del 
request_init["snapshot"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = storage_pool.ListStoragePoolsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_storage_pools(request) + response = client.update_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListStoragePoolsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_storage_pools_rest_required_fields( - request_type=storage_pool.ListStoragePoolsRequest, +def test_update_snapshot_rest_required_fields( + request_type=gcn_snapshot.UpdateSnapshotRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10398,30 +19678,19 @@ def test_list_storage_pools_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_storage_pools._get_unset_required_fields(jsonified_request) + ).update_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_storage_pools._get_unset_required_fields(jsonified_request) + ).update_snapshot._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10430,7 +19699,7 @@ def test_list_storage_pools_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = storage_pool.ListStoragePoolsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10442,49 +19711,45 @@ def test_list_storage_pools_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_storage_pools(request) + response = client.update_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_storage_pools_rest_unset_required_fields(): +def test_update_snapshot_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_storage_pools._get_unset_required_fields({}) + unset_fields = transport.update_snapshot._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "updateMask", + "snapshot", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_storage_pools_rest_interceptors(null_interceptor): +def test_update_snapshot_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -10495,14 +19760,16 @@ def test_list_storage_pools_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_storage_pools" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_update_snapshot" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_storage_pools" + transports.NetAppRestInterceptor, "pre_update_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = storage_pool.ListStoragePoolsRequest.pb( - storage_pool.ListStoragePoolsRequest() + pb_message = gcn_snapshot.UpdateSnapshotRequest.pb( + gcn_snapshot.UpdateSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -10514,19 +19781,19 @@ def test_list_storage_pools_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = storage_pool.ListStoragePoolsResponse.to_json( - storage_pool.ListStoragePoolsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = storage_pool.ListStoragePoolsRequest() + request = gcn_snapshot.UpdateSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = storage_pool.ListStoragePoolsResponse() + post.return_value = operations_pb2.Operation() - client.list_storage_pools( + client.update_snapshot( request, metadata=[ ("key", "val"), @@ -10538,8 +19805,8 @@ def test_list_storage_pools_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_storage_pools_rest_bad_request( - transport: str = "rest", request_type=storage_pool.ListStoragePoolsRequest +def 
test_update_snapshot_rest_bad_request( + transport: str = "rest", request_type=gcn_snapshot.UpdateSnapshotRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10547,7 +19814,11 @@ def test_list_storage_pools_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "snapshot": { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10559,10 +19830,10 @@ def test_list_storage_pools_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_storage_pools(request) + client.update_snapshot(request) -def test_list_storage_pools_rest_flattened(): +def test_update_snapshot_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10571,246 +19842,113 @@ def test_list_storage_pools_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = storage_pool.ListStoragePoolsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "snapshot": { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + snapshot=gcn_snapshot.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = storage_pool.ListStoragePoolsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_storage_pools(**mock_args) + client.update_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/storagePools" + "%s/v1/{snapshot.name=projects/*/locations/*/volumes/*/snapshots/*}" % client.transport._host, args[1], ) -def test_list_storage_pools_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_storage_pools( - storage_pool.ListStoragePoolsRequest(), - parent="parent_value", - ) - - -def test_list_storage_pools_rest_pager(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - storage_pool.ListStoragePoolsResponse( - storage_pools=[ - storage_pool.StoragePool(), - storage_pool.StoragePool(), - storage_pool.StoragePool(), - ], - next_page_token="abc", - ), - storage_pool.ListStoragePoolsResponse( - storage_pools=[], - next_page_token="def", - ), - storage_pool.ListStoragePoolsResponse( - storage_pools=[ - storage_pool.StoragePool(), - ], - next_page_token="ghi", - ), - storage_pool.ListStoragePoolsResponse( - storage_pools=[ - storage_pool.StoragePool(), - storage_pool.StoragePool(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - storage_pool.ListStoragePoolsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_storage_pools(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, storage_pool.StoragePool) for i in results) - - pages = list(client.list_storage_pools(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - 
assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - gcn_storage_pool.CreateStoragePoolRequest, - dict, - ], -) -def test_create_storage_pool_rest(request_type): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["storage_pool"] = { - "name": "name_value", - "service_level": 1, - "capacity_gib": 1247, - "volume_capacity_gib": 2006, - "volume_count": 1312, - "state": 1, - "state_details": "state_details_value", - "create_time": {"seconds": 751, "nanos": 543}, - "description": "description_value", - "labels": {}, - "network": "network_value", - "active_directory": "active_directory_value", - "kms_config": "kms_config_value", - "ldap_enabled": True, - "psa_range": "psa_range_value", - "encryption_type": 1, - "global_access_allowed": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_storage_pool.CreateStoragePoolRequest.meta.fields["storage_pool"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +def test_update_snapshot_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_snapshot( + gcn_snapshot.UpdateSnapshotRequest(), + snapshot=gcn_snapshot.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_update_snapshot_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["storage_pool"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": 
is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + active_directory.ListActiveDirectoriesRequest, + dict, + ], +) +def test_list_active_directories_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["storage_pool"][field])): - del request_init["storage_pool"][field][i][subfield] - else: - del request_init["storage_pool"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = active_directory.ListActiveDirectoriesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = active_directory.ListActiveDirectoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_storage_pool(request) + response = client.list_active_directories(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListActiveDirectoriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_storage_pool_rest_required_fields( - request_type=gcn_storage_pool.CreateStoragePoolRequest, +def test_list_active_directories_rest_required_fields( + request_type=active_directory.ListActiveDirectoriesRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["storage_pool_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10822,32 +19960,33 @@ def test_create_storage_pool_rest_required_fields( ) # verify fields with default values are dropped - assert "storagePoolId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_storage_pool._get_unset_required_fields(jsonified_request) + ).list_active_directories._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "storagePoolId" in 
jsonified_request - assert jsonified_request["storagePoolId"] == request_init["storage_pool_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["storagePoolId"] = "storage_pool_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_storage_pool._get_unset_required_fields(jsonified_request) + ).list_active_directories._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("storage_pool_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "storagePoolId" in jsonified_request - assert jsonified_request["storagePoolId"] == "storage_pool_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10856,7 +19995,7 @@ def test_create_storage_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = active_directory.ListActiveDirectoriesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10868,52 +20007,51 @@ def test_create_storage_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = active_directory.ListActiveDirectoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_storage_pool(request) + response = client.list_active_directories(request) - expected_params = [ - ( - "storagePoolId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_storage_pool_rest_unset_required_fields(): +def test_list_active_directories_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_storage_pool._get_unset_required_fields({}) + unset_fields = transport.list_active_directories._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("storagePoolId",)) - & set( + set( ( - "parent", - "storagePoolId", - "storagePool", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_storage_pool_rest_interceptors(null_interceptor): +def test_list_active_directories_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -10924,16 +20062,14 @@ def test_create_storage_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_storage_pool" + transports.NetAppRestInterceptor, "post_list_active_directories" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_storage_pool" + transports.NetAppRestInterceptor, "pre_list_active_directories" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_storage_pool.CreateStoragePoolRequest.pb( - gcn_storage_pool.CreateStoragePoolRequest() + pb_message = active_directory.ListActiveDirectoriesRequest.pb( + active_directory.ListActiveDirectoriesRequest() ) transcode.return_value = { "method": "post", @@ -10945,19 +20081,21 @@ def test_create_storage_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + active_directory.ListActiveDirectoriesResponse.to_json( + active_directory.ListActiveDirectoriesResponse() + ) ) - request = gcn_storage_pool.CreateStoragePoolRequest() + request = active_directory.ListActiveDirectoriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = active_directory.ListActiveDirectoriesResponse() - client.create_storage_pool( + client.list_active_directories( request, metadata=[ ("key", "val"), @@ -10969,8 +20107,8 @@ def test_create_storage_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_storage_pool_rest_bad_request( - transport: str = 
"rest", request_type=gcn_storage_pool.CreateStoragePoolRequest +def test_list_active_directories_rest_bad_request( + transport: str = "rest", request_type=active_directory.ListActiveDirectoriesRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10990,10 +20128,10 @@ def test_create_storage_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_storage_pool(request) + client.list_active_directories(request) -def test_create_storage_pool_rest_flattened(): +def test_list_active_directories_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11002,7 +20140,7 @@ def test_create_storage_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = active_directory.ListActiveDirectoriesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -11010,32 +20148,32 @@ def test_create_storage_pool_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - storage_pool_id="storage_pool_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = active_directory.ListActiveDirectoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_storage_pool(**mock_args) + 
client.list_active_directories(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/storagePools" + "%s/v1/{parent=projects/*/locations/*}/activeDirectories" % client.transport._host, args[1], ) -def test_create_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_list_active_directories_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11044,90 +20182,155 @@ def test_create_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_storage_pool( - gcn_storage_pool.CreateStoragePoolRequest(), + client.list_active_directories( + active_directory.ListActiveDirectoriesRequest(), parent="parent_value", - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - storage_pool_id="storage_pool_id_value", ) -def test_create_storage_pool_rest_error(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_list_active_directories_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + active_directory.ListActiveDirectoriesResponse( + active_directories=[ + active_directory.ActiveDirectory(), + active_directory.ActiveDirectory(), + active_directory.ActiveDirectory(), + ], + next_page_token="abc", + ), + active_directory.ListActiveDirectoriesResponse( + active_directories=[], + next_page_token="def", + ), + active_directory.ListActiveDirectoriesResponse( + active_directories=[ + active_directory.ActiveDirectory(), + ], + next_page_token="ghi", + ), + active_directory.ListActiveDirectoriesResponse( + active_directories=[ + active_directory.ActiveDirectory(), + active_directory.ActiveDirectory(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + active_directory.ListActiveDirectoriesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_active_directories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, active_directory.ActiveDirectory) for i in results) + + pages = list(client.list_active_directories(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - storage_pool.GetStoragePoolRequest, + active_directory.GetActiveDirectoryRequest, dict, ], ) -def test_get_storage_pool_rest(request_type): +def test_get_active_directory_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", 
) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = storage_pool.StoragePool( + return_value = active_directory.ActiveDirectory( name="name_value", - service_level=common.ServiceLevel.PREMIUM, - capacity_gib=1247, - volume_capacity_gib=2006, - volume_count=1312, - state=storage_pool.StoragePool.State.READY, - state_details="state_details_value", + state=active_directory.ActiveDirectory.State.CREATING, + domain="domain_value", + site="site_value", + dns="dns_value", + net_bios_prefix="net_bios_prefix_value", + organizational_unit="organizational_unit_value", + aes_encryption=True, + username="username_value", + password="password_value", + backup_operators=["backup_operators_value"], + security_operators=["security_operators_value"], + kdc_hostname="kdc_hostname_value", + kdc_ip="kdc_ip_value", + nfs_users_with_ldap=True, description="description_value", - network="network_value", - active_directory="active_directory_value", - kms_config="kms_config_value", - ldap_enabled=True, - psa_range="psa_range_value", - encryption_type=common.EncryptionType.SERVICE_MANAGED, - global_access_allowed=True, + ldap_signing=True, + encrypt_dc_connections=True, + state_details="state_details_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_pool.StoragePool.pb(return_value) + return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_storage_pool(request) + response = client.get_active_directory(request) # Establish that the response is the type that we expect. - assert isinstance(response, storage_pool.StoragePool) + assert isinstance(response, active_directory.ActiveDirectory) assert response.name == "name_value" - assert response.service_level == common.ServiceLevel.PREMIUM - assert response.capacity_gib == 1247 - assert response.volume_capacity_gib == 2006 - assert response.volume_count == 1312 - assert response.state == storage_pool.StoragePool.State.READY - assert response.state_details == "state_details_value" + assert response.state == active_directory.ActiveDirectory.State.CREATING + assert response.domain == "domain_value" + assert response.site == "site_value" + assert response.dns == "dns_value" + assert response.net_bios_prefix == "net_bios_prefix_value" + assert response.organizational_unit == "organizational_unit_value" + assert response.aes_encryption is True + assert response.username == "username_value" + assert response.password == "password_value" + assert response.backup_operators == ["backup_operators_value"] + assert response.security_operators == ["security_operators_value"] + assert response.kdc_hostname == "kdc_hostname_value" + assert response.kdc_ip == "kdc_ip_value" + assert response.nfs_users_with_ldap is True assert response.description == "description_value" - assert response.network == "network_value" - assert response.active_directory == "active_directory_value" - assert response.kms_config == "kms_config_value" - assert response.ldap_enabled is True - assert response.psa_range == "psa_range_value" - assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED - assert response.global_access_allowed is True + assert response.ldap_signing is True + assert response.encrypt_dc_connections is True + assert response.state_details == "state_details_value" -def 
test_get_storage_pool_rest_required_fields( - request_type=storage_pool.GetStoragePoolRequest, +def test_get_active_directory_rest_required_fields( + request_type=active_directory.GetActiveDirectoryRequest, ): transport_class = transports.NetAppRestTransport @@ -11147,7 +20350,7 @@ def test_get_storage_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_storage_pool._get_unset_required_fields(jsonified_request) + ).get_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11156,7 +20359,7 @@ def test_get_storage_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_storage_pool._get_unset_required_fields(jsonified_request) + ).get_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11170,7 +20373,7 @@ def test_get_storage_pool_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = storage_pool.StoragePool() + return_value = active_directory.ActiveDirectory() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11191,30 +20394,30 @@ def test_get_storage_pool_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_pool.StoragePool.pb(return_value) + return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_storage_pool(request) + response = client.get_active_directory(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_storage_pool_rest_unset_required_fields(): +def test_get_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_storage_pool._get_unset_required_fields({}) + unset_fields = transport.get_active_directory._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_storage_pool_rest_interceptors(null_interceptor): +def test_get_active_directory_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -11225,14 +20428,14 @@ def test_get_storage_pool_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_storage_pool" + transports.NetAppRestInterceptor, "post_get_active_directory" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_storage_pool" + transports.NetAppRestInterceptor, 
"pre_get_active_directory" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = storage_pool.GetStoragePoolRequest.pb( - storage_pool.GetStoragePoolRequest() + pb_message = active_directory.GetActiveDirectoryRequest.pb( + active_directory.GetActiveDirectoryRequest() ) transcode.return_value = { "method": "post", @@ -11244,19 +20447,19 @@ def test_get_storage_pool_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = storage_pool.StoragePool.to_json( - storage_pool.StoragePool() + req.return_value._content = active_directory.ActiveDirectory.to_json( + active_directory.ActiveDirectory() ) - request = storage_pool.GetStoragePoolRequest() + request = active_directory.GetActiveDirectoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = storage_pool.StoragePool() + post.return_value = active_directory.ActiveDirectory() - client.get_storage_pool( + client.get_active_directory( request, metadata=[ ("key", "val"), @@ -11268,8 +20471,8 @@ def test_get_storage_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_storage_pool_rest_bad_request( - transport: str = "rest", request_type=storage_pool.GetStoragePoolRequest +def test_get_active_directory_rest_bad_request( + transport: str = "rest", request_type=active_directory.GetActiveDirectoryRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11277,7 +20480,9 @@ def test_get_storage_pool_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -11289,10 +20494,10 @@ def test_get_storage_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_storage_pool(request) + client.get_active_directory(request) -def test_get_storage_pool_rest_flattened(): +def test_get_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11301,11 +20506,11 @@ def test_get_storage_pool_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = storage_pool.StoragePool() + return_value = active_directory.ActiveDirectory() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/storagePools/sample3" + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" } # get truthy value for each flattened field @@ -11318,25 +20523,25 @@ def test_get_storage_pool_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = storage_pool.StoragePool.pb(return_value) + return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_storage_pool(**mock_args) + client.get_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/storagePools/*}" + "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" % client.transport._host, args[1], ) -def test_get_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_get_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11345,13 +20550,13 @@ def test_get_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_storage_pool( - storage_pool.GetStoragePoolRequest(), + client.get_active_directory( + active_directory.GetActiveDirectoryRequest(), name="name_value", ) -def test_get_storage_pool_rest_error(): +def test_get_active_directory_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11360,47 +20565,52 @@ def test_get_storage_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_storage_pool.UpdateStoragePoolRequest, + gcn_active_directory.CreateActiveDirectoryRequest, dict, ], ) -def test_update_storage_pool_rest(request_type): +def test_create_active_directory_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "storage_pool": { - "name": "projects/sample1/locations/sample2/storagePools/sample3" - } - } - request_init["storage_pool"] = { - "name": "projects/sample1/locations/sample2/storagePools/sample3", - "service_level": 1, - "capacity_gib": 1247, - "volume_capacity_gib": 2006, - "volume_count": 1312, - "state": 1, - "state_details": "state_details_value", + request_init = {"parent": "projects/sample1/locations/sample2"} + 
request_init["active_directory"] = { + "name": "name_value", "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "domain": "domain_value", + "site": "site_value", + "dns": "dns_value", + "net_bios_prefix": "net_bios_prefix_value", + "organizational_unit": "organizational_unit_value", + "aes_encryption": True, + "username": "username_value", + "password": "password_value", + "backup_operators": ["backup_operators_value1", "backup_operators_value2"], + "security_operators": [ + "security_operators_value1", + "security_operators_value2", + ], + "kdc_hostname": "kdc_hostname_value", + "kdc_ip": "kdc_ip_value", + "nfs_users_with_ldap": True, "description": "description_value", + "ldap_signing": True, + "encrypt_dc_connections": True, "labels": {}, - "network": "network_value", - "active_directory": "active_directory_value", - "kms_config": "kms_config_value", - "ldap_enabled": True, - "psa_range": "psa_range_value", - "encryption_type": 1, - "global_access_allowed": True, + "state_details": "state_details_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_storage_pool.UpdateStoragePoolRequest.meta.fields["storage_pool"] + test_field = gcn_active_directory.CreateActiveDirectoryRequest.meta.fields[ + "active_directory" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -11428,7 +20638,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["storage_pool"].items(): # pragma: NO COVER + for field, value in request_init["active_directory"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -11458,10 +20668,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["storage_pool"][field])): - del request_init["storage_pool"][field][i][subfield] + for i in range(0, len(request_init["active_directory"][field])): + del request_init["active_directory"][field][i][subfield] else: - del request_init["storage_pool"][field][subfield] + del request_init["active_directory"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -11476,18 +20686,20 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_storage_pool(request) + response = client.create_active_directory(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_storage_pool_rest_required_fields( - request_type=gcn_storage_pool.UpdateStoragePoolRequest, +def test_create_active_directory_rest_required_fields( + request_type=gcn_active_directory.CreateActiveDirectoryRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["parent"] = "" + request_init["active_directory_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11499,22 +20711,32 @@ def test_update_storage_pool_rest_required_fields( ) # verify fields with default values are dropped + assert "activeDirectoryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_storage_pool._get_unset_required_fields(jsonified_request) + ).create_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "activeDirectoryId" in jsonified_request + assert jsonified_request["activeDirectoryId"] == request_init["active_directory_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["activeDirectoryId"] = "active_directory_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_storage_pool._get_unset_required_fields(jsonified_request) + ).create_active_directory._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("active_directory_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "activeDirectoryId" in jsonified_request + assert jsonified_request["activeDirectoryId"] == "active_directory_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11535,7 +20757,7 @@ def test_update_storage_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -11548,32 +20770,39 @@ def test_update_storage_pool_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_storage_pool(request) + response = client.create_active_directory(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "activeDirectoryId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_storage_pool_rest_unset_required_fields(): +def test_create_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_storage_pool._get_unset_required_fields({}) + unset_fields = transport.create_active_directory._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set(("activeDirectoryId",)) & set( ( - "updateMask", - "storagePool", + "parent", + "activeDirectory", + "activeDirectoryId", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_storage_pool_rest_interceptors(null_interceptor): 
+def test_create_active_directory_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -11586,14 +20815,14 @@ def test_update_storage_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_storage_pool" + transports.NetAppRestInterceptor, "post_create_active_directory" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_storage_pool" + transports.NetAppRestInterceptor, "pre_create_active_directory" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_storage_pool.UpdateStoragePoolRequest.pb( - gcn_storage_pool.UpdateStoragePoolRequest() + pb_message = gcn_active_directory.CreateActiveDirectoryRequest.pb( + gcn_active_directory.CreateActiveDirectoryRequest() ) transcode.return_value = { "method": "post", @@ -11609,7 +20838,7 @@ def test_update_storage_pool_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcn_storage_pool.UpdateStoragePoolRequest() + request = gcn_active_directory.CreateActiveDirectoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -11617,7 +20846,7 @@ def test_update_storage_pool_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_storage_pool( + client.create_active_directory( request, metadata=[ ("key", "val"), @@ -11629,8 +20858,9 @@ def test_update_storage_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_storage_pool_rest_bad_request( - transport: str = "rest", request_type=gcn_storage_pool.UpdateStoragePoolRequest +def test_create_active_directory_rest_bad_request( + transport: str = "rest", + 
request_type=gcn_active_directory.CreateActiveDirectoryRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11638,11 +20868,7 @@ def test_update_storage_pool_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "storage_pool": { - "name": "projects/sample1/locations/sample2/storagePools/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11654,10 +20880,10 @@ def test_update_storage_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_storage_pool(request) + client.create_active_directory(request) -def test_update_storage_pool_rest_flattened(): +def test_create_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11669,16 +20895,13 @@ def test_update_storage_pool_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "storage_pool": { - "name": "projects/sample1/locations/sample2/storagePools/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + active_directory_id="active_directory_id_value", ) mock_args.update(sample_request) @@ -11689,20 +20912,20 @@ def test_update_storage_pool_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_storage_pool(**mock_args) + 
client.create_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{storage_pool.name=projects/*/locations/*/storagePools/*}" + "%s/v1/{parent=projects/*/locations/*}/activeDirectories" % client.transport._host, args[1], ) -def test_update_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_create_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11711,14 +20934,15 @@ def test_update_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_storage_pool( - gcn_storage_pool.UpdateStoragePoolRequest(), - storage_pool=gcn_storage_pool.StoragePool(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_active_directory( + gcn_active_directory.CreateActiveDirectoryRequest(), + parent="parent_value", + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + active_directory_id="active_directory_id_value", ) -def test_update_storage_pool_rest_error(): +def test_create_active_directory_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11727,18 +20951,117 @@ def test_update_storage_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - storage_pool.DeleteStoragePoolRequest, + gcn_active_directory.UpdateActiveDirectoryRequest, dict, ], ) -def test_delete_storage_pool_rest(request_type): +def test_update_active_directory_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": 
"projects/sample1/locations/sample2/storagePools/sample3"} + request_init = { + "active_directory": { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } + } + request_init["active_directory"] = { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "domain": "domain_value", + "site": "site_value", + "dns": "dns_value", + "net_bios_prefix": "net_bios_prefix_value", + "organizational_unit": "organizational_unit_value", + "aes_encryption": True, + "username": "username_value", + "password": "password_value", + "backup_operators": ["backup_operators_value1", "backup_operators_value2"], + "security_operators": [ + "security_operators_value1", + "security_operators_value2", + ], + "kdc_hostname": "kdc_hostname_value", + "kdc_ip": "kdc_ip_value", + "nfs_users_with_ldap": True, + "description": "description_value", + "ldap_signing": True, + "encrypt_dc_connections": True, + "labels": {}, + "state_details": "state_details_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_active_directory.UpdateActiveDirectoryRequest.meta.fields[ + "active_directory" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["active_directory"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["active_directory"][field])): + del request_init["active_directory"][field][i][subfield] + 
else: + del request_init["active_directory"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -11753,19 +21076,18 @@ def test_delete_storage_pool_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_storage_pool(request) + response = client.update_active_directory(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_storage_pool_rest_required_fields( - request_type=storage_pool.DeleteStoragePoolRequest, +def test_update_active_directory_rest_required_fields( + request_type=gcn_active_directory.UpdateActiveDirectoryRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11780,21 +21102,19 @@ def test_delete_storage_pool_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_storage_pool._get_unset_required_fields(jsonified_request) + ).update_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_storage_pool._get_unset_required_fields(jsonified_request) + ).update_active_directory._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11815,9 +21135,10 @@ def test_delete_storage_pool_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11827,24 +21148,32 @@ def test_delete_storage_pool_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_storage_pool(request) + response = client.update_active_directory(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_storage_pool_rest_unset_required_fields(): +def test_update_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_storage_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_active_directory._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "activeDirectory", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_storage_pool_rest_interceptors(null_interceptor): +def test_update_active_directory_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else 
transports.NetAppRestInterceptor(), @@ -11857,14 +21186,14 @@ def test_delete_storage_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_storage_pool" + transports.NetAppRestInterceptor, "post_update_active_directory" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_storage_pool" + transports.NetAppRestInterceptor, "pre_update_active_directory" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = storage_pool.DeleteStoragePoolRequest.pb( - storage_pool.DeleteStoragePoolRequest() + pb_message = gcn_active_directory.UpdateActiveDirectoryRequest.pb( + gcn_active_directory.UpdateActiveDirectoryRequest() ) transcode.return_value = { "method": "post", @@ -11880,7 +21209,7 @@ def test_delete_storage_pool_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = storage_pool.DeleteStoragePoolRequest() + request = gcn_active_directory.UpdateActiveDirectoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -11888,7 +21217,7 @@ def test_delete_storage_pool_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_storage_pool( + client.update_active_directory( request, metadata=[ ("key", "val"), @@ -11900,8 +21229,9 @@ def test_delete_storage_pool_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_storage_pool_rest_bad_request( - transport: str = "rest", request_type=storage_pool.DeleteStoragePoolRequest +def test_update_active_directory_rest_bad_request( + transport: str = "rest", + request_type=gcn_active_directory.UpdateActiveDirectoryRequest, ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11909,7 +21239,11 @@ def test_delete_storage_pool_rest_bad_request( ) # send a request that will satisfy transcoding 
- request_init = {"name": "projects/sample1/locations/sample2/storagePools/sample3"} + request_init = { + "active_directory": { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11921,10 +21255,10 @@ def test_delete_storage_pool_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_storage_pool(request) + client.update_active_directory(request) -def test_delete_storage_pool_rest_flattened(): +def test_update_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11937,12 +21271,15 @@ def test_delete_storage_pool_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/storagePools/sample3" + "active_directory": { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -11953,20 +21290,20 @@ def test_delete_storage_pool_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_storage_pool(**mock_args) + client.update_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/storagePools/*}" + "%s/v1/{active_directory.name=projects/*/locations/*/activeDirectories/*}" % client.transport._host, args[1], ) -def test_delete_storage_pool_rest_flattened_error(transport: str = "rest"): +def test_update_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11975,13 +21312,14 @@ def test_delete_storage_pool_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_storage_pool( - storage_pool.DeleteStoragePoolRequest(), - name="name_value", + client.update_active_directory( + gcn_active_directory.UpdateActiveDirectoryRequest(), + active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_storage_pool_rest_error(): +def test_update_active_directory_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11990,50 +21328,47 @@ def test_delete_storage_pool_rest_error(): @pytest.mark.parametrize( "request_type", [ - volume.ListVolumesRequest, + active_directory.DeleteActiveDirectoryRequest, dict, ], ) -def test_list_volumes_rest(request_type): +def test_delete_active_directory_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_volumes(request) + response = client.delete_active_directory(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVolumesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): +def test_delete_active_directory_rest_required_fields( + request_type=active_directory.DeleteActiveDirectoryRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12048,30 +21383,21 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_volumes._get_unset_required_fields(jsonified_request) + ).delete_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + 
jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_volumes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).delete_active_directory._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12080,7 +21406,7 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12092,49 +21418,36 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_volumes(request) + response = client.delete_active_directory(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_volumes_rest_unset_required_fields(): +def test_delete_active_directory_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_volumes._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.delete_active_directory._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_volumes_rest_interceptors(null_interceptor): +def test_delete_active_directory_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -12145,13 +21458,17 @@ def test_list_volumes_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_volumes" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_active_directory" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_volumes" + transports.NetAppRestInterceptor, "pre_delete_active_directory" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = volume.ListVolumesRequest.pb(volume.ListVolumesRequest()) + pb_message = active_directory.DeleteActiveDirectoryRequest.pb( + active_directory.DeleteActiveDirectoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12162,19 +21479,19 @@ def test_list_volumes_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = volume.ListVolumesResponse.to_json( - volume.ListVolumesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = volume.ListVolumesRequest() + request = active_directory.DeleteActiveDirectoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = volume.ListVolumesResponse() + post.return_value = operations_pb2.Operation() - client.list_volumes( + client.delete_active_directory( request, metadata=[ ("key", "val"), @@ -12186,8 +21503,8 @@ def test_list_volumes_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_volumes_rest_bad_request( - transport: str = "rest", request_type=volume.ListVolumesRequest +def test_delete_active_directory_rest_bad_request( + transport: str = "rest", request_type=active_directory.DeleteActiveDirectoryRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12195,7 +21512,9 @@ def 
test_list_volumes_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12207,10 +21526,10 @@ def test_list_volumes_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_volumes(request) + client.delete_active_directory(request) -def test_list_volumes_rest_flattened(): +def test_delete_active_directory_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12219,39 +21538,40 @@ def test_list_volumes_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = volume.ListVolumesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_volumes(**mock_args) + client.delete_active_directory(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" + % client.transport._host, args[1], ) -def test_list_volumes_rest_flattened_error(transport: str = "rest"): +def test_delete_active_directory_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12260,164 +21580,65 @@ def test_list_volumes_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_volumes( - volume.ListVolumesRequest(), - parent="parent_value", + client.delete_active_directory( + active_directory.DeleteActiveDirectoryRequest(), + name="name_value", ) -def test_list_volumes_rest_pager(transport: str = "rest"): +def test_delete_active_directory_rest_error(): client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token="abc", - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token="def", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token="ghi", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_volumes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, volume.Volume) for i in results) - - pages = list(client.list_volumes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", 
"ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - volume.GetVolumeRequest, + kms.ListKmsConfigsRequest, dict, ], ) -def test_get_volume_rest(request_type): +def test_list_kms_configs_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = volume.Volume( - name="name_value", - state=volume.Volume.State.READY, - state_details="state_details_value", - share_name="share_name_value", - psa_range="psa_range_value", - storage_pool="storage_pool_value", - network="network_value", - service_level=common.ServiceLevel.PREMIUM, - capacity_gib=1247, - protocols=[volume.Protocols.NFSV3], - smb_settings=[volume.SMBSettings.ENCRYPT_DATA], - unix_permissions="unix_permissions_value", - description="description_value", - snap_reserve=0.1293, - snapshot_directory=True, - used_gib=834, - security_style=volume.SecurityStyle.NTFS, - kerberos_enabled=True, - ldap_enabled=True, - active_directory="active_directory_value", - kms_config="kms_config_value", - encryption_type=common.EncryptionType.SERVICE_MANAGED, - has_replication=True, - restricted_actions=[volume.RestrictedAction.DELETE], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = 
client.get_volume(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, volume.Volume) - assert response.name == "name_value" - assert response.state == volume.Volume.State.READY - assert response.state_details == "state_details_value" - assert response.share_name == "share_name_value" - assert response.psa_range == "psa_range_value" - assert response.storage_pool == "storage_pool_value" - assert response.network == "network_value" - assert response.service_level == common.ServiceLevel.PREMIUM - assert response.capacity_gib == 1247 - assert response.protocols == [volume.Protocols.NFSV3] - assert response.smb_settings == [volume.SMBSettings.ENCRYPT_DATA] - assert response.unix_permissions == "unix_permissions_value" - assert response.description == "description_value" - assert math.isclose(response.snap_reserve, 0.1293, rel_tol=1e-6) - assert response.snapshot_directory is True - assert response.used_gib == 834 - assert response.security_style == volume.SecurityStyle.NTFS - assert response.kerberos_enabled is True - assert response.ldap_enabled is True - assert response.active_directory == "active_directory_value" - assert response.kms_config == "kms_config_value" - assert response.encryption_type == common.EncryptionType.SERVICE_MANAGED - assert response.has_replication is True - assert response.restricted_actions == [volume.RestrictedAction.DELETE] + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = kms.ListKmsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = kms.ListKmsConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_kms_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListKmsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRequest): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12432,21 +21653,30 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_volume._get_unset_required_fields(jsonified_request) + ).list_kms_configs._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_volume._get_unset_required_fields(jsonified_request) + ).list_kms_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12455,7 +21685,7 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = volume.Volume() + return_value = kms.ListKmsConfigsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12476,30 +21706,40 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) + return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_volume(request) + response = client.list_kms_configs(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_volume_rest_unset_required_fields(): +def test_list_kms_configs_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_kms_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_volume_rest_interceptors(null_interceptor): +def test_list_kms_configs_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -12510,13 +21750,13 @@ def test_get_volume_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_volume" + transports.NetAppRestInterceptor, "post_list_kms_configs" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_volume" + transports.NetAppRestInterceptor, "pre_list_kms_configs" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = volume.GetVolumeRequest.pb(volume.GetVolumeRequest()) + pb_message = kms.ListKmsConfigsRequest.pb(kms.ListKmsConfigsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12527,17 +21767,19 @@ def test_get_volume_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = volume.Volume.to_json(volume.Volume()) + req.return_value._content = kms.ListKmsConfigsResponse.to_json( + kms.ListKmsConfigsResponse() + ) - request = volume.GetVolumeRequest() + request = kms.ListKmsConfigsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = volume.Volume() + post.return_value = kms.ListKmsConfigsResponse() - client.get_volume( + client.list_kms_configs( request, metadata=[ ("key", "val"), @@ -12549,8 +21791,8 @@ def 
test_get_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_volume_rest_bad_request( - transport: str = "rest", request_type=volume.GetVolumeRequest +def test_list_kms_configs_rest_bad_request( + transport: str = "rest", request_type=kms.ListKmsConfigsRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12558,7 +21800,7 @@ def test_get_volume_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12570,10 +21812,10 @@ def test_get_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_volume(request) + client.list_kms_configs(request) -def test_get_volume_rest_flattened(): +def test_list_kms_configs_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12582,14 +21824,14 @@ def test_get_volume_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = volume.Volume() + return_value = kms.ListKmsConfigsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -12597,24 +21839,24 @@ def test_get_volume_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) + return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_volume(**mock_args) + client.list_kms_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, args[1], ) -def test_get_volume_rest_flattened_error(transport: str = "rest"): +def test_list_kms_configs_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12623,26 +21865,81 @@ def test_get_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_volume( - volume.GetVolumeRequest(), - name="name_value", + client.list_kms_configs( + kms.ListKmsConfigsRequest(), + parent="parent_value", ) -def test_get_volume_rest_error(): +def test_list_kms_configs_rest_pager(transport: str = "rest"): client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + kms.ListKmsConfigsResponse( + kms_configs=[ + kms.KmsConfig(), + kms.KmsConfig(), + kms.KmsConfig(), + ], + next_page_token="abc", + ), + kms.ListKmsConfigsResponse( + kms_configs=[], + next_page_token="def", + ), + kms.ListKmsConfigsResponse( + kms_configs=[ + kms.KmsConfig(), + ], + next_page_token="ghi", + ), + kms.ListKmsConfigsResponse( + kms_configs=[ + kms.KmsConfig(), + kms.KmsConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(kms.ListKmsConfigsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_kms_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, kms.KmsConfig) for i in results) + + pages = list(client.list_kms_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + 
assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - gcn_volume.CreateVolumeRequest, + kms.CreateKmsConfigRequest, dict, ], ) -def test_create_volume_rest(request_type): +def test_create_kms_config_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12650,90 +21947,23 @@ def test_create_volume_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["volume"] = { + request_init["kms_config"] = { "name": "name_value", + "crypto_key_name": "crypto_key_name_value", "state": 1, "state_details": "state_details_value", "create_time": {"seconds": 751, "nanos": 543}, - "share_name": "share_name_value", - "psa_range": "psa_range_value", - "storage_pool": "storage_pool_value", - "network": "network_value", - "service_level": 1, - "capacity_gib": 1247, - "export_policy": { - "rules": [ - { - "allowed_clients": "allowed_clients_value", - "has_root_access": "has_root_access_value", - "access_type": 1, - "nfsv3": True, - "nfsv4": True, - "kerberos_5_read_only": True, - "kerberos_5_read_write": True, - "kerberos_5i_read_only": True, - "kerberos_5i_read_write": True, - "kerberos_5p_read_only": True, - "kerberos_5p_read_write": True, - } - ] - }, - "protocols": [1], - "smb_settings": [1], - "mount_options": [ - { - "export": "export_value", - "export_full": "export_full_value", - "protocol": 1, - "instructions": "instructions_value", - } - ], - "unix_permissions": "unix_permissions_value", - "labels": {}, "description": "description_value", - "snapshot_policy": { - "enabled": True, - "hourly_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - }, - "daily_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - "hour": 0.446, - }, - "weekly_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - "hour": 0.446, - "day": 
"day_value", - }, - "monthly_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - "hour": 0.446, - "days_of_month": "days_of_month_value", - }, - }, - "snap_reserve": 0.1293, - "snapshot_directory": True, - "used_gib": 834, - "security_style": 1, - "kerberos_enabled": True, - "ldap_enabled": True, - "active_directory": "active_directory_value", - "restore_parameters": {"source_snapshot": "source_snapshot_value"}, - "kms_config": "kms_config_value", - "encryption_type": 1, - "has_replication": True, - "restricted_actions": [1], + "labels": {}, + "instructions": "instructions_value", + "service_account": "service_account_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_volume.CreateVolumeRequest.meta.fields["volume"] + test_field = kms.CreateKmsConfigRequest.meta.fields["kms_config"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12761,7 +21991,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["volume"].items(): # pragma: NO COVER + for field, value in request_init["kms_config"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12791,10 +22021,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["volume"][field])): - del request_init["volume"][field][i][subfield] + for i in range(0, len(request_init["kms_config"][field])): + del 
request_init["kms_config"][field][i][subfield] else: - del request_init["volume"][field][subfield] + del request_init["kms_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12809,20 +22039,20 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_volume(request) + response = client.create_kms_config(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_volume_rest_required_fields( - request_type=gcn_volume.CreateVolumeRequest, +def test_create_kms_config_rest_required_fields( + request_type=kms.CreateKmsConfigRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["volume_id"] = "" + request_init["kms_config_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12834,32 +22064,32 @@ def test_create_volume_rest_required_fields( ) # verify fields with default values are dropped - assert "volumeId" not in jsonified_request + assert "kmsConfigId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_volume._get_unset_required_fields(jsonified_request) + ).create_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "volumeId" in jsonified_request - assert jsonified_request["volumeId"] == request_init["volume_id"] + assert "kmsConfigId" in jsonified_request + assert jsonified_request["kmsConfigId"] == request_init["kms_config_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["volumeId"] = "volume_id_value" + jsonified_request["kmsConfigId"] = "kms_config_id_value" unset_fields = 
transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_volume._get_unset_required_fields(jsonified_request) + ).create_kms_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("volume_id",)) + assert not set(unset_fields) - set(("kms_config_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "volumeId" in jsonified_request - assert jsonified_request["volumeId"] == "volume_id_value" + assert "kmsConfigId" in jsonified_request + assert jsonified_request["kmsConfigId"] == "kms_config_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12893,11 +22123,11 @@ def test_create_volume_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_volume(request) + response = client.create_kms_config(request) expected_params = [ ( - "volumeId", + "kmsConfigId", "", ), ("$alt", "json;enum-encoding=int"), @@ -12906,26 +22136,26 @@ def test_create_volume_rest_required_fields( assert expected_params == actual_params -def test_create_volume_rest_unset_required_fields(): +def test_create_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_volume._get_unset_required_fields({}) + unset_fields = transport.create_kms_config._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("volumeId",)) + set(("kmsConfigId",)) & set( ( "parent", - "volumeId", - "volume", + "kmsConfigId", + "kmsConfig", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_volume_rest_interceptors(null_interceptor): +def 
test_create_kms_config_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -12938,13 +22168,13 @@ def test_create_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_volume" + transports.NetAppRestInterceptor, "post_create_kms_config" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_volume" + transports.NetAppRestInterceptor, "pre_create_kms_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_volume.CreateVolumeRequest.pb(gcn_volume.CreateVolumeRequest()) + pb_message = kms.CreateKmsConfigRequest.pb(kms.CreateKmsConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12959,7 +22189,7 @@ def test_create_volume_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcn_volume.CreateVolumeRequest() + request = kms.CreateKmsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -12967,7 +22197,7 @@ def test_create_volume_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_volume( + client.create_kms_config( request, metadata=[ ("key", "val"), @@ -12979,8 +22209,8 @@ def test_create_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_volume_rest_bad_request( - transport: str = "rest", request_type=gcn_volume.CreateVolumeRequest +def test_create_kms_config_rest_bad_request( + transport: str = "rest", request_type=kms.CreateKmsConfigRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13000,10 +22230,10 @@ def test_create_volume_rest_bad_request( response_value.status_code = 400 
response_value.request = Request() req.return_value = response_value - client.create_volume(request) + client.create_kms_config(request) -def test_create_volume_rest_flattened(): +def test_create_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13020,8 +22250,8 @@ def test_create_volume_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - volume=gcn_volume.Volume(name="name_value"), - volume_id="volume_id_value", + kms_config=kms.KmsConfig(name="name_value"), + kms_config_id="kms_config_id_value", ) mock_args.update(sample_request) @@ -13032,19 +22262,19 @@ def test_create_volume_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_volume(**mock_args) + client.create_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/volumes" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, args[1], ) -def test_create_volume_rest_flattened_error(transport: str = "rest"): +def test_create_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13053,15 +22283,15 @@ def test_create_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_volume( - gcn_volume.CreateVolumeRequest(), + client.create_kms_config( + kms.CreateKmsConfigRequest(), parent="parent_value", - volume=gcn_volume.Volume(name="name_value"), - volume_id="volume_id_value", + kms_config=kms.KmsConfig(name="name_value"), + kms_config_id="kms_config_id_value", ) -def test_create_volume_rest_error(): +def test_create_kms_config_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13070,191 +22300,60 @@ def test_create_volume_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_volume.UpdateVolumeRequest, + kms.GetKmsConfigRequest, dict, ], ) -def test_update_volume_rest(request_type): +def test_get_kms_config_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} - } - request_init["volume"] = { - "name": "projects/sample1/locations/sample2/volumes/sample3", - "state": 1, - "state_details": "state_details_value", - "create_time": {"seconds": 751, "nanos": 543}, - "share_name": "share_name_value", - "psa_range": "psa_range_value", - "storage_pool": "storage_pool_value", - "network": "network_value", - "service_level": 1, - "capacity_gib": 1247, - "export_policy": { - "rules": [ - { - "allowed_clients": "allowed_clients_value", - "has_root_access": "has_root_access_value", - "access_type": 1, - "nfsv3": True, - "nfsv4": True, - "kerberos_5_read_only": True, - "kerberos_5_read_write": True, - "kerberos_5i_read_only": True, - "kerberos_5i_read_write": True, - "kerberos_5p_read_only": True, - "kerberos_5p_read_write": True, - } - ] - }, - "protocols": [1], - "smb_settings": [1], - "mount_options": [ - { - "export": "export_value", - "export_full": "export_full_value", - "protocol": 1, - "instructions": "instructions_value", - } - 
], - "unix_permissions": "unix_permissions_value", - "labels": {}, - "description": "description_value", - "snapshot_policy": { - "enabled": True, - "hourly_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - }, - "daily_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - "hour": 0.446, - }, - "weekly_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - "hour": 0.446, - "day": "day_value", - }, - "monthly_schedule": { - "snapshots_to_keep": 0.18330000000000002, - "minute": 0.658, - "hour": 0.446, - "days_of_month": "days_of_month_value", - }, - }, - "snap_reserve": 0.1293, - "snapshot_directory": True, - "used_gib": 834, - "security_style": 1, - "kerberos_enabled": True, - "ldap_enabled": True, - "active_directory": "active_directory_value", - "restore_parameters": {"source_snapshot": "source_snapshot_value"}, - "kms_config": "kms_config_value", - "encryption_type": 1, - "has_replication": True, - "restricted_actions": [1], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_volume.UpdateVolumeRequest.meta.fields["volume"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["volume"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["volume"][field])): - del request_init["volume"][field][i][subfield] - else: - del 
request_init["volume"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = kms.KmsConfig( + name="name_value", + crypto_key_name="crypto_key_name_value", + state=kms.KmsConfig.State.READY, + state_details="state_details_value", + description="description_value", + instructions="instructions_value", + service_account="service_account_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = kms.KmsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_volume(request) + response = client.get_kms_config(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, kms.KmsConfig) + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" + assert response.state == kms.KmsConfig.State.READY + assert response.state_details == "state_details_value" + assert response.description == "description_value" + assert response.instructions == "instructions_value" + assert response.service_account == "service_account_value" -def test_update_volume_rest_required_fields( - request_type=gcn_volume.UpdateVolumeRequest, -): +def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigRequest): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13269,19 +22368,21 @@ def test_update_volume_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_volume._get_unset_required_fields(jsonified_request) + ).get_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_volume._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).get_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13290,7 +22391,7 @@ def test_update_volume_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = kms.KmsConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13302,45 +22403,39 @@ def test_update_volume_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = kms.KmsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_volume(request) + response = client.get_kms_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_volume_rest_unset_required_fields(): +def test_get_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_volume._get_unset_required_fields({}) - assert set(unset_fields) == ( - 
set(("updateMask",)) - & set( - ( - "updateMask", - "volume", - ) - ) - ) + unset_fields = transport.get_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_volume_rest_interceptors(null_interceptor): +def test_get_kms_config_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -13351,15 +22446,13 @@ def test_update_volume_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_volume" + transports.NetAppRestInterceptor, "post_get_kms_config" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_volume" + transports.NetAppRestInterceptor, "pre_get_kms_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_volume.UpdateVolumeRequest.pb(gcn_volume.UpdateVolumeRequest()) + pb_message = kms.GetKmsConfigRequest.pb(kms.GetKmsConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13370,19 +22463,17 @@ def test_update_volume_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = kms.KmsConfig.to_json(kms.KmsConfig()) - request = gcn_volume.UpdateVolumeRequest() + request = kms.GetKmsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = kms.KmsConfig() - client.update_volume( + 
client.get_kms_config( request, metadata=[ ("key", "val"), @@ -13394,8 +22485,8 @@ def test_update_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_volume_rest_bad_request( - transport: str = "rest", request_type=gcn_volume.UpdateVolumeRequest +def test_get_kms_config_rest_bad_request( + transport: str = "rest", request_type=kms.GetKmsConfigRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13403,9 +22494,7 @@ def test_update_volume_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} - } + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13417,10 +22506,10 @@ def test_update_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_volume(request) + client.get_kms_config(request) -def test_update_volume_rest_flattened(): +def test_get_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13429,41 +22518,41 @@ def test_update_volume_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = kms.KmsConfig() # get arguments that satisfy an http rule for this method sample_request = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" } # get truthy value for each flattened field mock_args = dict( - volume=gcn_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = kms.KmsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_volume(**mock_args) + client.get_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{volume.name=projects/*/locations/*/volumes/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, args[1], ) -def test_update_volume_rest_flattened_error(transport: str = "rest"): +def test_get_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13472,14 +22561,13 @@ def test_update_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_volume( - gcn_volume.UpdateVolumeRequest(), - volume=gcn_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_kms_config( + kms.GetKmsConfigRequest(), + name="name_value", ) -def test_update_volume_rest_error(): +def test_get_kms_config_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13488,18 +22576,98 @@ def test_update_volume_rest_error(): @pytest.mark.parametrize( "request_type", [ - volume.DeleteVolumeRequest, + kms.UpdateKmsConfigRequest, dict, ], ) -def test_delete_volume_rest(request_type): +def test_update_kms_config_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = { + "kms_config": {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + } + request_init["kms_config"] = { + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3", + "crypto_key_name": "crypto_key_name_value", + "state": 1, + "state_details": "state_details_value", + "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", + "labels": {}, + "instructions": "instructions_value", + "service_account": "service_account_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = kms.UpdateKmsConfigRequest.meta.fields["kms_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["kms_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["kms_config"][field])): + del 
request_init["kms_config"][field][i][subfield] + else: + del request_init["kms_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13514,17 +22682,18 @@ def test_delete_volume_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_volume(request) + response = client.update_kms_config(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequest): +def test_update_kms_config_rest_required_fields( + request_type=kms.UpdateKmsConfigRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13539,23 +22708,19 @@ def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_volume._get_unset_required_fields(jsonified_request) + ).update_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_volume._get_unset_required_fields(jsonified_request) + ).update_kms_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force",)) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13576,9 +22741,10 @@ def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13588,24 +22754,32 @@ def test_delete_volume_rest_required_fields(request_type=volume.DeleteVolumeRequ response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_volume(request) + response = client.update_kms_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_volume_rest_unset_required_fields(): +def test_update_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.update_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "kmsConfig", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_volume_rest_interceptors(null_interceptor): +def test_update_kms_config_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -13618,13 +22792,13 @@ def test_delete_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_volume" + transports.NetAppRestInterceptor, "post_update_kms_config" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_volume" + transports.NetAppRestInterceptor, "pre_update_kms_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = volume.DeleteVolumeRequest.pb(volume.DeleteVolumeRequest()) + pb_message = kms.UpdateKmsConfigRequest.pb(kms.UpdateKmsConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13639,7 +22813,7 @@ def test_delete_volume_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = volume.DeleteVolumeRequest() + request = kms.UpdateKmsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13647,7 +22821,7 @@ def test_delete_volume_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_volume( + client.update_kms_config( request, metadata=[ ("key", "val"), @@ -13659,8 +22833,8 @@ def test_delete_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_volume_rest_bad_request( - transport: str = "rest", request_type=volume.DeleteVolumeRequest +def test_update_kms_config_rest_bad_request( + transport: str = "rest", request_type=kms.UpdateKmsConfigRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13668,7 +22842,9 @@ def test_delete_volume_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = { + "kms_config": {"name": 
"projects/sample1/locations/sample2/kmsConfigs/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13680,10 +22856,10 @@ def test_delete_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_volume(request) + client.update_kms_config(request) -def test_delete_volume_rest_flattened(): +def test_update_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13695,11 +22871,16 @@ def test_delete_volume_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + sample_request = { + "kms_config": { + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + kms_config=kms.KmsConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -13710,19 +22891,20 @@ def test_delete_volume_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_volume(**mock_args) + client.update_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + "%s/v1/{kms_config.name=projects/*/locations/*/kmsConfigs/*}" + % client.transport._host, args[1], ) -def test_delete_volume_rest_flattened_error(transport: str = "rest"): +def test_update_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13731,13 +22913,14 @@ def test_delete_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_volume( - volume.DeleteVolumeRequest(), - name="name_value", + client.update_kms_config( + kms.UpdateKmsConfigRequest(), + kms_config=kms.KmsConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_volume_rest_error(): +def test_update_kms_config_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13746,18 +22929,18 @@ def test_delete_volume_rest_error(): @pytest.mark.parametrize( "request_type", [ - volume.RevertVolumeRequest, + kms.EncryptVolumesRequest, dict, ], ) -def test_revert_volume_rest(request_type): +def test_encrypt_volumes_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -13772,18 +22955,17 @@ def test_revert_volume_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.revert_volume(request) + response = client.encrypt_volumes(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequest): +def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesRequest): transport_class = transports.NetAppRestTransport request_init = {} request_init["name"] = "" - request_init["snapshot_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13798,24 +22980,21 @@ def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).revert_volume._get_unset_required_fields(jsonified_request) + ).encrypt_volumes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["snapshotId"] = "snapshot_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).revert_volume._get_unset_required_fields(jsonified_request) + ).encrypt_volumes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "snapshotId" in jsonified_request - assert jsonified_request["snapshotId"] == "snapshot_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13849,32 +23028,24 @@ def test_revert_volume_rest_required_fields(request_type=volume.RevertVolumeRequ response_value._content 
= json_return_value.encode("UTF-8") req.return_value = response_value - response = client.revert_volume(request) + response = client.encrypt_volumes(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_revert_volume_rest_unset_required_fields(): +def test_encrypt_volumes_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.revert_volume._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "snapshotId", - ) - ) - ) + unset_fields = transport.encrypt_volumes._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_revert_volume_rest_interceptors(null_interceptor): +def test_encrypt_volumes_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -13887,13 +23058,13 @@ def test_revert_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_revert_volume" + transports.NetAppRestInterceptor, "post_encrypt_volumes" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_revert_volume" + transports.NetAppRestInterceptor, "pre_encrypt_volumes" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = volume.RevertVolumeRequest.pb(volume.RevertVolumeRequest()) + pb_message = kms.EncryptVolumesRequest.pb(kms.EncryptVolumesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13908,7 +23079,7 @@ def test_revert_volume_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - 
request = volume.RevertVolumeRequest() + request = kms.EncryptVolumesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13916,7 +23087,7 @@ def test_revert_volume_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.revert_volume( + client.encrypt_volumes( request, metadata=[ ("key", "val"), @@ -13928,8 +23099,8 @@ def test_revert_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_revert_volume_rest_bad_request( - transport: str = "rest", request_type=volume.RevertVolumeRequest +def test_encrypt_volumes_rest_bad_request( + transport: str = "rest", request_type=kms.EncryptVolumesRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13937,7 +23108,7 @@ def test_revert_volume_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -13949,10 +23120,10 @@ def test_revert_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.revert_volume(request) + client.encrypt_volumes(request) -def test_revert_volume_rest_error(): +def test_encrypt_volumes_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13961,52 +23132,54 @@ def test_revert_volume_rest_error(): @pytest.mark.parametrize( "request_type", [ - snapshot.ListSnapshotsRequest, + kms.VerifyKmsConfigRequest, dict, ], ) -def test_list_snapshots_rest(request_type): +def test_verify_kms_config_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = snapshot.ListSnapshotsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = kms.VerifyKmsConfigResponse( + healthy=True, + health_error="health_error_value", + instructions="instructions_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = snapshot.ListSnapshotsResponse.pb(return_value) + return_value = kms.VerifyKmsConfigResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_snapshots(request) + response = client.verify_kms_config(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSnapshotsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, kms.VerifyKmsConfigResponse) + assert response.healthy is True + assert response.health_error == "health_error_value" + assert response.instructions == "instructions_value" -def test_list_snapshots_rest_required_fields( - request_type=snapshot.ListSnapshotsRequest, +def test_verify_kms_config_rest_required_fields( + request_type=kms.VerifyKmsConfigRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14021,30 +23194,21 @@ def test_list_snapshots_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_snapshots._get_unset_required_fields(jsonified_request) + ).verify_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with 
default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_snapshots._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).verify_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14053,7 +23217,7 @@ def test_list_snapshots_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = snapshot.ListSnapshotsResponse() + return_value = kms.VerifyKmsConfigResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14065,49 +23229,40 @@ def test_list_snapshots_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = snapshot.ListSnapshotsResponse.pb(return_value) + return_value = kms.VerifyKmsConfigResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_snapshots(request) + response = client.verify_kms_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_snapshots_rest_unset_required_fields(): +def test_verify_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_snapshots._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.verify_kms_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_snapshots_rest_interceptors(null_interceptor): +def test_verify_kms_config_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -14118,13 +23273,13 @@ def 
test_list_snapshots_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_snapshots" + transports.NetAppRestInterceptor, "post_verify_kms_config" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_snapshots" + transports.NetAppRestInterceptor, "pre_verify_kms_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = snapshot.ListSnapshotsRequest.pb(snapshot.ListSnapshotsRequest()) + pb_message = kms.VerifyKmsConfigRequest.pb(kms.VerifyKmsConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14135,227 +23290,98 @@ def test_list_snapshots_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = snapshot.ListSnapshotsResponse.to_json( - snapshot.ListSnapshotsResponse() + req.return_value._content = kms.VerifyKmsConfigResponse.to_json( + kms.VerifyKmsConfigResponse() ) - request = snapshot.ListSnapshotsRequest() + request = kms.VerifyKmsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = snapshot.ListSnapshotsResponse() + post.return_value = kms.VerifyKmsConfigResponse() - client.list_snapshots( + client.verify_kms_config( request, metadata=[ ("key", "val"), ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_snapshots_rest_bad_request( - transport: str = "rest", request_type=snapshot.ListSnapshotsRequest -): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within 
the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_snapshots(request) - - -def test_list_snapshots_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = snapshot.ListSnapshotsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = snapshot.ListSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_snapshots(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" - % client.transport._host, - args[1], - ) - - -def test_list_snapshots_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_snapshots( - snapshot.ListSnapshotsRequest(), - parent="parent_value", + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_list_snapshots_rest_pager(transport: str = "rest"): + +def test_verify_kms_config_rest_bad_request( + transport: str = "rest", request_type=kms.VerifyKmsConfigRequest +): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - snapshot.ListSnapshotsResponse( - snapshots=[ - snapshot.Snapshot(), - snapshot.Snapshot(), - snapshot.Snapshot(), - ], - next_page_token="abc", - ), - snapshot.ListSnapshotsResponse( - snapshots=[], - next_page_token="def", - ), - snapshot.ListSnapshotsResponse( - snapshots=[ - snapshot.Snapshot(), - ], - next_page_token="ghi", - ), - snapshot.ListSnapshotsResponse( - snapshots=[ - snapshot.Snapshot(), - snapshot.Snapshot(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(snapshot.ListSnapshotsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request = request_type(**request_init) - pager = client.list_snapshots(request=sample_request) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.verify_kms_config(request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, snapshot.Snapshot) for i in results) - pages = list(client.list_snapshots(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_verify_kms_config_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - snapshot.GetSnapshotRequest, + kms.DeleteKmsConfigRequest, dict, ], ) -def test_get_snapshot_rest(request_type): +def test_delete_kms_config_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = snapshot.Snapshot( - name="name_value", - state=snapshot.Snapshot.State.READY, - state_details="state_details_value", - description="description_value", - used_bytes=0.10790000000000001, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = snapshot.Snapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_snapshot(request) + response = client.delete_kms_config(request) # Establish that the response is the type that we expect. - assert isinstance(response, snapshot.Snapshot) - assert response.name == "name_value" - assert response.state == snapshot.Snapshot.State.READY - assert response.state_details == "state_details_value" - assert response.description == "description_value" - assert math.isclose(response.used_bytes, 0.10790000000000001, rel_tol=1e-6) + assert response.operation.name == "operations/spam" -def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequest): +def test_delete_kms_config_rest_required_fields( + request_type=kms.DeleteKmsConfigRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -14374,7 +23400,7 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_snapshot._get_unset_required_fields(jsonified_request) + ).delete_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14383,7 +23409,7 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).get_snapshot._get_unset_required_fields(jsonified_request) + ).delete_kms_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14397,7 +23423,7 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = snapshot.Snapshot() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14409,39 +23435,36 @@ def test_get_snapshot_rest_required_fields(request_type=snapshot.GetSnapshotRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = snapshot.Snapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_snapshot(request) + response = client.delete_kms_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_snapshot_rest_unset_required_fields(): +def test_delete_kms_config_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_snapshot._get_unset_required_fields({}) + unset_fields = transport.delete_kms_config._get_unset_required_fields({}) assert set(unset_fields) 
== (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_snapshot_rest_interceptors(null_interceptor): +def test_delete_kms_config_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -14452,13 +23475,15 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_snapshot" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_kms_config" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_snapshot" + transports.NetAppRestInterceptor, "pre_delete_kms_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = snapshot.GetSnapshotRequest.pb(snapshot.GetSnapshotRequest()) + pb_message = kms.DeleteKmsConfigRequest.pb(kms.DeleteKmsConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14469,17 +23494,19 @@ def test_get_snapshot_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = snapshot.Snapshot.to_json(snapshot.Snapshot()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = snapshot.GetSnapshotRequest() + request = kms.DeleteKmsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = snapshot.Snapshot() + post.return_value = operations_pb2.Operation() - client.get_snapshot( + client.delete_kms_config( request, metadata=[ ("key", "val"), @@ -14491,8 +23518,8 @@ def test_get_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() 
-def test_get_snapshot_rest_bad_request( - transport: str = "rest", request_type=snapshot.GetSnapshotRequest +def test_delete_kms_config_rest_bad_request( + transport: str = "rest", request_type=kms.DeleteKmsConfigRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14500,9 +23527,7 @@ def test_get_snapshot_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14514,10 +23539,10 @@ def test_get_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_snapshot(request) + client.delete_kms_config(request) -def test_get_snapshot_rest_flattened(): +def test_delete_kms_config_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14526,11 +23551,11 @@ def test_get_snapshot_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = snapshot.Snapshot() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" } # get truthy value for each flattened field @@ -14542,26 +23567,23 @@ def test_get_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = snapshot.Snapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_snapshot(**mock_args) + client.delete_kms_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, args[1], ) -def test_get_snapshot_rest_flattened_error(transport: str = "rest"): +def test_delete_kms_config_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14570,137 +23592,67 @@ def test_get_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_snapshot( - snapshot.GetSnapshotRequest(), + client.delete_kms_config( + kms.DeleteKmsConfigRequest(), name="name_value", ) -def test_get_snapshot_rest_error(): +def test_delete_kms_config_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - -@pytest.mark.parametrize( - "request_type", - [ - gcn_snapshot.CreateSnapshotRequest, - dict, - ], -) -def test_create_snapshot_rest(request_type): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} - request_init["snapshot"] = { - "name": "name_value", - "state": 1, - "state_details": "state_details_value", - "description": "description_value", - "used_bytes": 0.10790000000000001, - "create_time": {"seconds": 751, "nanos": 543}, - "labels": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_snapshot.CreateSnapshotRequest.meta.fields["snapshot"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["snapshot"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["snapshot"][field])): - del request_init["snapshot"][field][i][subfield] - else: - del 
request_init["snapshot"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + replication.ListReplicationsRequest, + dict, + ], +) +def test_list_replications_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.ListReplicationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_snapshot(request) + response = client.list_replications(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListReplicationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_snapshot_rest_required_fields( - request_type=gcn_snapshot.CreateSnapshotRequest, +def test_list_replications_rest_required_fields( + request_type=replication.ListReplicationsRequest, ): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" - request_init["snapshot_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14712,32 +23664,33 @@ def test_create_snapshot_rest_required_fields( ) # verify fields with default values are dropped - assert "snapshotId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_snapshot._get_unset_required_fields(jsonified_request) + ).list_replications._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "snapshotId" in jsonified_request - assert jsonified_request["snapshotId"] == request_init["snapshot_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["snapshotId"] = "snapshot_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_snapshot._get_unset_required_fields(jsonified_request) + ).list_replications._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("snapshot_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "snapshotId" in jsonified_request - assert jsonified_request["snapshotId"] == "snapshot_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14746,7 +23699,7 @@ def test_create_snapshot_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.ListReplicationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14758,52 +23711,49 @@ def test_create_snapshot_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_snapshot(request) + response = client.list_replications(request) - expected_params = [ - ( - "snapshotId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_create_snapshot_rest_unset_required_fields(): +def test_list_replications_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_snapshot._get_unset_required_fields({}) + unset_fields = transport.list_replications._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("snapshotId",)) - & set( + set( ( - "parent", - "snapshot", - "snapshotId", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_snapshot_rest_interceptors(null_interceptor): +def test_list_replications_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -14814,16 +23764,14 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_snapshot" + transports.NetAppRestInterceptor, "post_list_replications" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_snapshot" + transports.NetAppRestInterceptor, "pre_list_replications" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_snapshot.CreateSnapshotRequest.pb( - gcn_snapshot.CreateSnapshotRequest() + pb_message = replication.ListReplicationsRequest.pb( + replication.ListReplicationsRequest() ) transcode.return_value = { "method": "post", @@ -14835,19 +23783,19 @@ def test_create_snapshot_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - 
operations_pb2.Operation() + req.return_value._content = replication.ListReplicationsResponse.to_json( + replication.ListReplicationsResponse() ) - request = gcn_snapshot.CreateSnapshotRequest() + request = replication.ListReplicationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = replication.ListReplicationsResponse() - client.create_snapshot( + client.list_replications( request, metadata=[ ("key", "val"), @@ -14859,8 +23807,8 @@ def test_create_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_snapshot_rest_bad_request( - transport: str = "rest", request_type=gcn_snapshot.CreateSnapshotRequest +def test_list_replications_rest_bad_request( + transport: str = "rest", request_type=replication.ListReplicationsRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14880,10 +23828,10 @@ def test_create_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_snapshot(request) + client.list_replications(request) -def test_create_snapshot_rest_flattened(): +def test_list_replications_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14892,7 +23840,7 @@ def test_create_snapshot_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.ListReplicationsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -14902,32 +23850,32 @@ def test_create_snapshot_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - snapshot=gcn_snapshot.Snapshot(name="name_value"), - snapshot_id="snapshot_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = replication.ListReplicationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_snapshot(**mock_args) + client.list_replications(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/snapshots" + "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" % client.transport._host, args[1], ) -def test_create_snapshot_rest_flattened_error(transport: str = "rest"): +def test_list_replications_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14936,28 +23884,85 @@ def test_create_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_snapshot( - gcn_snapshot.CreateSnapshotRequest(), + client.list_replications( + replication.ListReplicationsRequest(), parent="parent_value", - snapshot=gcn_snapshot.Snapshot(name="name_value"), - snapshot_id="snapshot_id_value", ) -def test_create_snapshot_rest_error(): +def test_list_replications_rest_pager(transport: str = "rest"): client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + replication.ListReplicationsResponse( + replications=[ + replication.Replication(), + replication.Replication(), + replication.Replication(), + ], + next_page_token="abc", + ), + replication.ListReplicationsResponse( + replications=[], + next_page_token="def", + ), + replication.ListReplicationsResponse( + replications=[ + replication.Replication(), + ], + next_page_token="ghi", + ), + replication.ListReplicationsResponse( + replications=[ + replication.Replication(), + replication.Replication(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + replication.ListReplicationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + pager = client.list_replications(request=sample_request) + + 
results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, replication.Replication) for i in results) + + pages = list(client.list_replications(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - snapshot.DeleteSnapshotRequest, + replication.GetReplicationRequest, dict, ], ) -def test_delete_snapshot_rest(request_type): +def test_get_replication_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14965,30 +23970,56 @@ def test_delete_snapshot_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.Replication( + name="name_value", + state=replication.Replication.State.CREATING, + state_details="state_details_value", + role=replication.Replication.ReplicationRole.SOURCE, + replication_schedule=replication.Replication.ReplicationSchedule.EVERY_10_MINUTES, + mirror_state=replication.Replication.MirrorState.PREPARING, + healthy=True, + destination_volume="destination_volume_value", + description="description_value", + source_volume="source_volume_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_snapshot(request) + response = client.get_replication(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, replication.Replication) + assert response.name == "name_value" + assert response.state == replication.Replication.State.CREATING + assert response.state_details == "state_details_value" + assert response.role == replication.Replication.ReplicationRole.SOURCE + assert ( + response.replication_schedule + == replication.Replication.ReplicationSchedule.EVERY_10_MINUTES + ) + assert response.mirror_state == replication.Replication.MirrorState.PREPARING + assert response.healthy is True + assert response.destination_volume == "destination_volume_value" + assert response.description == "description_value" + assert response.source_volume == "source_volume_value" -def test_delete_snapshot_rest_required_fields( - request_type=snapshot.DeleteSnapshotRequest, +def test_get_replication_rest_required_fields( + request_type=replication.GetReplicationRequest, ): transport_class = transports.NetAppRestTransport @@ -15008,7 +24039,7 @@ def test_delete_snapshot_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_snapshot._get_unset_required_fields(jsonified_request) + ).get_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -15017,7 +24048,7 @@ def test_delete_snapshot_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_snapshot._get_unset_required_fields(jsonified_request) + ).get_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -15031,7 +24062,7 @@ def test_delete_snapshot_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.Replication() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15043,36 +24074,39 @@ def test_delete_snapshot_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_snapshot(request) + response = client.get_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_snapshot_rest_unset_required_fields(): +def test_get_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_snapshot._get_unset_required_fields({}) + unset_fields = transport.get_replication._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_snapshot_rest_interceptors(null_interceptor): +def test_get_replication_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -15083,15 +24117,15 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_snapshot" + transports.NetAppRestInterceptor, "post_get_replication" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_snapshot" + transports.NetAppRestInterceptor, "pre_get_replication" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = snapshot.DeleteSnapshotRequest.pb(snapshot.DeleteSnapshotRequest()) + pb_message = replication.GetReplicationRequest.pb( + replication.GetReplicationRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15102,19 +24136,19 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = replication.Replication.to_json( + replication.Replication() ) - request = snapshot.DeleteSnapshotRequest() + request = replication.GetReplicationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = replication.Replication() - client.delete_snapshot( + client.get_replication( request, metadata=[ ("key", "val"), @@ -15126,8 +24160,8 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_snapshot_rest_bad_request( - transport: str = "rest", request_type=snapshot.DeleteSnapshotRequest +def test_get_replication_rest_bad_request( + transport: str = "rest", request_type=replication.GetReplicationRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15136,7 +24170,7 @@ def test_delete_snapshot_rest_bad_request( # send a request that will satisfy transcoding request_init = 
{ - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } request = request_type(**request_init) @@ -15149,10 +24183,10 @@ def test_delete_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_snapshot(request) + client.get_replication(request) -def test_delete_snapshot_rest_flattened(): +def test_get_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15161,11 +24195,11 @@ def test_delete_snapshot_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = replication.Replication() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } # get truthy value for each flattened field @@ -15177,24 +24211,26 @@ def test_delete_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_snapshot(**mock_args) + client.get_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/snapshots/*}" + "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" % client.transport._host, args[1], ) -def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): +def test_get_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15203,13 +24239,13 @@ def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_snapshot( - snapshot.DeleteSnapshotRequest(), + client.get_replication( + replication.GetReplicationRequest(), name="name_value", ) -def test_delete_snapshot_rest_error(): +def test_get_replication_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15218,37 +24254,54 @@ def test_delete_snapshot_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_snapshot.UpdateSnapshotRequest, + gcn_replication.CreateReplicationRequest, dict, ], ) -def test_update_snapshot_rest(request_type): +def test_create_replication_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "snapshot": { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" - } - } - request_init["snapshot"] = { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4", + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init["replication"] = { + "name": "name_value", "state": 1, "state_details": "state_details_value", - "description": "description_value", - "used_bytes": 0.10790000000000001, + "role": 1, + 
"replication_schedule": 1, + "mirror_state": 1, + "healthy": True, "create_time": {"seconds": 751, "nanos": 543}, + "destination_volume": "destination_volume_value", + "transfer_stats": { + "transfer_bytes": 1515, + "total_transfer_duration": {"seconds": 751, "nanos": 543}, + "last_transfer_bytes": 2046, + "last_transfer_duration": {}, + "lag_duration": {}, + "update_time": {}, + "last_transfer_end_time": {}, + "last_transfer_error": "last_transfer_error_value", + }, "labels": {}, + "description": "description_value", + "destination_volume_parameters": { + "storage_pool": "storage_pool_value", + "volume_id": "volume_id_value", + "share_name": "share_name_value", + "description": "description_value", + }, + "source_volume": "source_volume_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_snapshot.UpdateSnapshotRequest.meta.fields["snapshot"] + test_field = gcn_replication.CreateReplicationRequest.meta.fields["replication"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -15276,7 +24329,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["snapshot"].items(): # pragma: NO COVER + for field, value in request_init["replication"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -15306,10 +24359,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["snapshot"][field])): - del 
request_init["snapshot"][field][i][subfield] + for i in range(0, len(request_init["replication"][field])): + del request_init["replication"][field][i][subfield] else: - del request_init["snapshot"][field][subfield] + del request_init["replication"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -15324,18 +24377,20 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_snapshot(request) + response = client.create_replication(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_snapshot_rest_required_fields( - request_type=gcn_snapshot.UpdateSnapshotRequest, +def test_create_replication_rest_required_fields( + request_type=gcn_replication.CreateReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["parent"] = "" + request_init["replication_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15347,22 +24402,32 @@ def test_update_snapshot_rest_required_fields( ) # verify fields with default values are dropped + assert "replicationId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_snapshot._get_unset_required_fields(jsonified_request) + ).create_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "replicationId" in jsonified_request + assert jsonified_request["replicationId"] == request_init["replication_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["replicationId"] = "replication_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).update_snapshot._get_unset_required_fields(jsonified_request) + ).create_replication._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("replication_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "replicationId" in jsonified_request + assert jsonified_request["replicationId"] == "replication_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15383,7 +24448,7 @@ def test_update_snapshot_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -15396,32 +24461,39 @@ def test_update_snapshot_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_snapshot(request) + response = client.create_replication(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "replicationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_snapshot_rest_unset_required_fields(): +def test_create_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_snapshot._get_unset_required_fields({}) + unset_fields = transport.create_replication._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set(("replicationId",)) & set( ( - "updateMask", - "snapshot", + "parent", + "replication", + "replicationId", ) ) 
) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_snapshot_rest_interceptors(null_interceptor): +def test_create_replication_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -15434,14 +24506,14 @@ def test_update_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_snapshot" + transports.NetAppRestInterceptor, "post_create_replication" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_snapshot" + transports.NetAppRestInterceptor, "pre_create_replication" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_snapshot.UpdateSnapshotRequest.pb( - gcn_snapshot.UpdateSnapshotRequest() + pb_message = gcn_replication.CreateReplicationRequest.pb( + gcn_replication.CreateReplicationRequest() ) transcode.return_value = { "method": "post", @@ -15457,7 +24529,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcn_snapshot.UpdateSnapshotRequest() + request = gcn_replication.CreateReplicationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -15465,7 +24537,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_snapshot( + client.create_replication( request, metadata=[ ("key", "val"), @@ -15477,8 +24549,8 @@ def test_update_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_snapshot_rest_bad_request( - transport: str = "rest", request_type=gcn_snapshot.UpdateSnapshotRequest +def test_create_replication_rest_bad_request( + transport: str = "rest", 
request_type=gcn_replication.CreateReplicationRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15486,11 +24558,7 @@ def test_update_snapshot_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "snapshot": { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" - } - } + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15502,10 +24570,10 @@ def test_update_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_snapshot(request) + client.create_replication(request) -def test_update_snapshot_rest_flattened(): +def test_create_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15518,15 +24586,14 @@ def test_update_snapshot_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "snapshot": { - "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" - } + "parent": "projects/sample1/locations/sample2/volumes/sample3" } # get truthy value for each flattened field mock_args = dict( - snapshot=gcn_snapshot.Snapshot(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + replication=gcn_replication.Replication(name="name_value"), + replication_id="replication_id_value", ) mock_args.update(sample_request) @@ -15537,20 +24604,20 @@ def test_update_snapshot_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_snapshot(**mock_args) + client.create_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{snapshot.name=projects/*/locations/*/volumes/*/snapshots/*}" + "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" % client.transport._host, args[1], ) -def test_update_snapshot_rest_flattened_error(transport: str = "rest"): +def test_create_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15559,14 +24626,15 @@ def test_update_snapshot_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_snapshot( - gcn_snapshot.UpdateSnapshotRequest(), - snapshot=gcn_snapshot.Snapshot(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_replication( + gcn_replication.CreateReplicationRequest(), + parent="parent_value", + replication=gcn_replication.Replication(name="name_value"), + replication_id="replication_id_value", ) -def test_update_snapshot_rest_error(): +def test_create_replication_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15575,52 +24643,47 @@ def test_update_snapshot_rest_error(): @pytest.mark.parametrize( "request_type", [ - active_directory.ListActiveDirectoriesRequest, + replication.DeleteReplicationRequest, dict, ], ) -def test_list_active_directories_rest(request_type): +def test_delete_replication_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } request = request_type(**request_init) # Mock the http request call within 
the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = active_directory.ListActiveDirectoriesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = active_directory.ListActiveDirectoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_active_directories(request) + response = client.delete_replication(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListActiveDirectoriesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_active_directories_rest_required_fields( - request_type=active_directory.ListActiveDirectoriesRequest, +def test_delete_replication_rest_required_fields( + request_type=replication.DeleteReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15635,30 +24698,21 @@ def test_list_active_directories_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_active_directories._get_unset_required_fields(jsonified_request) + ).delete_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default 
values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_active_directories._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15667,7 +24721,7 @@ def test_list_active_directories_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = active_directory.ListActiveDirectoriesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15679,51 +24733,36 @@ def test_list_active_directories_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = active_directory.ListActiveDirectoriesResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_active_directories(request) + response = client.delete_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_active_directories_rest_unset_required_fields(): +def test_delete_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_active_directories._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.delete_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_active_directories_rest_interceptors(null_interceptor): +def test_delete_replication_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -15734,14 +24773,16 @@ def 
test_list_active_directories_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_active_directories" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_replication" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_active_directories" + transports.NetAppRestInterceptor, "pre_delete_replication" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = active_directory.ListActiveDirectoriesRequest.pb( - active_directory.ListActiveDirectoriesRequest() + pb_message = replication.DeleteReplicationRequest.pb( + replication.DeleteReplicationRequest() ) transcode.return_value = { "method": "post", @@ -15753,21 +24794,19 @@ def test_list_active_directories_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - active_directory.ListActiveDirectoriesResponse.to_json( - active_directory.ListActiveDirectoriesResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = active_directory.ListActiveDirectoriesRequest() + request = replication.DeleteReplicationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = active_directory.ListActiveDirectoriesResponse() + post.return_value = operations_pb2.Operation() - client.list_active_directories( + client.delete_replication( request, metadata=[ ("key", "val"), @@ -15779,8 +24818,8 @@ def test_list_active_directories_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_active_directories_rest_bad_request( - transport: str = "rest", request_type=active_directory.ListActiveDirectoriesRequest +def 
test_delete_replication_rest_bad_request( + transport: str = "rest", request_type=replication.DeleteReplicationRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15788,7 +24827,9 @@ def test_list_active_directories_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15800,10 +24841,10 @@ def test_list_active_directories_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_active_directories(request) + client.delete_replication(request) -def test_list_active_directories_rest_flattened(): +def test_delete_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15812,40 +24853,40 @@ def test_list_active_directories_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = active_directory.ListActiveDirectoriesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = active_directory.ListActiveDirectoriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_active_directories(**mock_args) + client.delete_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/activeDirectories" + "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" % client.transport._host, args[1], ) -def test_list_active_directories_rest_flattened_error(transport: str = "rest"): +def test_delete_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15854,83 +24895,26 @@ def test_list_active_directories_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_active_directories( - active_directory.ListActiveDirectoriesRequest(), - parent="parent_value", + client.delete_replication( + replication.DeleteReplicationRequest(), + name="name_value", ) -def test_list_active_directories_rest_pager(transport: str = "rest"): +def test_delete_replication_rest_error(): client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - active_directory.ListActiveDirectoriesResponse( - active_directories=[ - active_directory.ActiveDirectory(), - active_directory.ActiveDirectory(), - active_directory.ActiveDirectory(), - ], - next_page_token="abc", - ), - active_directory.ListActiveDirectoriesResponse( - active_directories=[], - next_page_token="def", - ), - active_directory.ListActiveDirectoriesResponse( - active_directories=[ - active_directory.ActiveDirectory(), - ], - next_page_token="ghi", - ), - active_directory.ListActiveDirectoriesResponse( - active_directories=[ - active_directory.ActiveDirectory(), - active_directory.ActiveDirectory(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - active_directory.ListActiveDirectoriesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} 
- - pager = client.list_active_directories(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, active_directory.ActiveDirectory) for i in results) - - pages = list(client.list_active_directories(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - active_directory.GetActiveDirectoryRequest, + gcn_replication.UpdateReplicationRequest, dict, ], ) -def test_get_active_directory_rest(request_type): +def test_update_replication_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15938,76 +24922,133 @@ def test_get_active_directory_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + "replication": { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } + } + request_init["replication"] = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4", + "state": 1, + "state_details": "state_details_value", + "role": 1, + "replication_schedule": 1, + "mirror_state": 1, + "healthy": True, + "create_time": {"seconds": 751, "nanos": 543}, + "destination_volume": "destination_volume_value", + "transfer_stats": { + "transfer_bytes": 1515, + "total_transfer_duration": {"seconds": 751, "nanos": 543}, + "last_transfer_bytes": 2046, + "last_transfer_duration": {}, + "lag_duration": {}, + "update_time": {}, + "last_transfer_end_time": {}, + "last_transfer_error": "last_transfer_error_value", + }, + "labels": {}, + "description": "description_value", + "destination_volume_parameters": { + "storage_pool": "storage_pool_value", + "volume_id": "volume_id_value", + "share_name": "share_name_value", + "description": "description_value", + }, + "source_volume": 
"source_volume_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_replication.UpdateReplicationRequest.meta.fields["replication"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["replication"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + 
"is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["replication"][field])): + del request_init["replication"][field][i][subfield] + else: + del request_init["replication"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = active_directory.ActiveDirectory( - name="name_value", - state=active_directory.ActiveDirectory.State.CREATING, - domain="domain_value", - site="site_value", - dns="dns_value", - net_bios_prefix="net_bios_prefix_value", - organizational_unit="organizational_unit_value", - aes_encryption=True, - username="username_value", - password="password_value", - backup_operators=["backup_operators_value"], - security_operators=["security_operators_value"], - kdc_hostname="kdc_hostname_value", - kdc_ip="kdc_ip_value", - nfs_users_with_ldap=True, - description="description_value", - ldap_signing=True, - encrypt_dc_connections=True, - state_details="state_details_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_active_directory(request) + response = client.update_replication(request) # Establish that the response is the type that we expect. - assert isinstance(response, active_directory.ActiveDirectory) - assert response.name == "name_value" - assert response.state == active_directory.ActiveDirectory.State.CREATING - assert response.domain == "domain_value" - assert response.site == "site_value" - assert response.dns == "dns_value" - assert response.net_bios_prefix == "net_bios_prefix_value" - assert response.organizational_unit == "organizational_unit_value" - assert response.aes_encryption is True - assert response.username == "username_value" - assert response.password == "password_value" - assert response.backup_operators == ["backup_operators_value"] - assert response.security_operators == ["security_operators_value"] - assert response.kdc_hostname == "kdc_hostname_value" - assert response.kdc_ip == "kdc_ip_value" - assert response.nfs_users_with_ldap is True - assert response.description == "description_value" - assert response.ldap_signing is True - assert response.encrypt_dc_connections is True - assert response.state_details == "state_details_value" + assert response.operation.name == "operations/spam" -def test_get_active_directory_rest_required_fields( - request_type=active_directory.GetActiveDirectoryRequest, +def test_update_replication_rest_required_fields( + request_type=gcn_replication.UpdateReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16022,21 +25063,19 @@ def test_get_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_active_directory._get_unset_required_fields(jsonified_request) + 
).update_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_active_directory._get_unset_required_fields(jsonified_request) + ).update_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16045,7 +25084,7 @@ def test_get_active_directory_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = active_directory.ActiveDirectory() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16057,39 +25096,45 @@ def test_get_active_directory_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_active_directory(request) + response = client.update_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_active_directory_rest_unset_required_fields(): +def test_update_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_active_directory._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_replication._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "replication", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_active_directory_rest_interceptors(null_interceptor): +def test_update_replication_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -16100,14 +25145,16 @@ def test_get_active_directory_rest_interceptors(null_interceptor): ) 
as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_active_directory" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_update_replication" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_active_directory" + transports.NetAppRestInterceptor, "pre_update_replication" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = active_directory.GetActiveDirectoryRequest.pb( - active_directory.GetActiveDirectoryRequest() + pb_message = gcn_replication.UpdateReplicationRequest.pb( + gcn_replication.UpdateReplicationRequest() ) transcode.return_value = { "method": "post", @@ -16119,19 +25166,19 @@ def test_get_active_directory_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = active_directory.ActiveDirectory.to_json( - active_directory.ActiveDirectory() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = active_directory.GetActiveDirectoryRequest() + request = gcn_replication.UpdateReplicationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = active_directory.ActiveDirectory() + post.return_value = operations_pb2.Operation() - client.get_active_directory( + client.update_replication( request, metadata=[ ("key", "val"), @@ -16143,8 +25190,8 @@ def test_get_active_directory_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_active_directory_rest_bad_request( - transport: str = "rest", request_type=active_directory.GetActiveDirectoryRequest +def test_update_replication_rest_bad_request( + transport: str = "rest", request_type=gcn_replication.UpdateReplicationRequest ): client = NetAppClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -16153,7 +25200,9 @@ def test_get_active_directory_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + "replication": { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } } request = request_type(**request_init) @@ -16166,10 +25215,10 @@ def test_get_active_directory_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_active_directory(request) + client.update_replication(request) -def test_get_active_directory_rest_flattened(): +def test_update_replication_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16178,42 +25227,43 @@ def test_get_active_directory_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = active_directory.ActiveDirectory() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + "replication": { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + replication=gcn_replication.Replication(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = active_directory.ActiveDirectory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_active_directory(**mock_args) + client.update_replication(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" + "%s/v1/{replication.name=projects/*/locations/*/volumes/*/replications/*}" % client.transport._host, args[1], ) -def test_get_active_directory_rest_flattened_error(transport: str = "rest"): +def test_update_replication_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16222,13 +25272,14 @@ def test_get_active_directory_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_active_directory( - active_directory.GetActiveDirectoryRequest(), - name="name_value", + client.update_replication( + gcn_replication.UpdateReplicationRequest(), + replication=gcn_replication.Replication(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_active_directory_rest_error(): +def test_update_replication_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16237,113 +25288,20 @@ def test_get_active_directory_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_active_directory.CreateActiveDirectoryRequest, + replication.StopReplicationRequest, dict, ], ) -def test_create_active_directory_rest(request_type): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["active_directory"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "domain": "domain_value", - "site": "site_value", - "dns": "dns_value", - "net_bios_prefix": "net_bios_prefix_value", - "organizational_unit": "organizational_unit_value", - "aes_encryption": True, - "username": "username_value", - "password": "password_value", - "backup_operators": ["backup_operators_value1", "backup_operators_value2"], - "security_operators": [ - "security_operators_value1", - "security_operators_value2", - ], - "kdc_hostname": "kdc_hostname_value", - "kdc_ip": "kdc_ip_value", - "nfs_users_with_ldap": True, - "description": "description_value", - "ldap_signing": True, - "encrypt_dc_connections": True, - "labels": {}, - "state_details": "state_details_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_active_directory.CreateActiveDirectoryRequest.meta.fields[ - "active_directory" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["active_directory"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["active_directory"][field])): - del request_init["active_directory"][field][i][subfield] - else: - del request_init["active_directory"][field][subfield] +def test_stop_replication_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -16358,20 +25316,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_active_directory(request) + response = client.stop_replication(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_active_directory_rest_required_fields( - request_type=gcn_active_directory.CreateActiveDirectoryRequest, +def test_stop_replication_rest_required_fields( + request_type=replication.StopReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" - request_init["active_directory_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16383,32 +25340,24 @@ def test_create_active_directory_rest_required_fields( ) # verify fields with default values are dropped - assert "activeDirectoryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_active_directory._get_unset_required_fields(jsonified_request) + ).stop_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "activeDirectoryId" in jsonified_request - assert jsonified_request["activeDirectoryId"] == request_init["active_directory_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["activeDirectoryId"] = "active_directory_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_active_directory._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("active_directory_id",)) + ).stop_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "activeDirectoryId" in jsonified_request - assert jsonified_request["activeDirectoryId"] == "active_directory_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16442,39 +25391,24 @@ def test_create_active_directory_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_active_directory(request) + response = client.stop_replication(request) - expected_params = [ - ( - "activeDirectoryId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_active_directory_rest_unset_required_fields(): +def test_stop_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_active_directory._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("activeDirectoryId",)) - & set( - ( - "parent", - "activeDirectory", - "activeDirectoryId", - ) - ) - ) + unset_fields = transport.stop_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_active_directory_rest_interceptors(null_interceptor): +def test_stop_replication_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -16487,14 +25421,14 @@ def test_create_active_directory_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_active_directory" + transports.NetAppRestInterceptor, "post_stop_replication" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_active_directory" + transports.NetAppRestInterceptor, "pre_stop_replication" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_active_directory.CreateActiveDirectoryRequest.pb( - gcn_active_directory.CreateActiveDirectoryRequest() + pb_message = replication.StopReplicationRequest.pb( + replication.StopReplicationRequest() ) transcode.return_value = { "method": "post", @@ -16510,7 +25444,7 @@ def test_create_active_directory_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcn_active_directory.CreateActiveDirectoryRequest() + request = replication.StopReplicationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16518,7 +25452,7 @@ def test_create_active_directory_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_active_directory( + client.stop_replication( request, metadata=[ ("key", "val"), @@ -16530,9 +25464,8 @@ def test_create_active_directory_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_active_directory_rest_bad_request( - transport: str = "rest", - request_type=gcn_active_directory.CreateActiveDirectoryRequest, +def test_stop_replication_rest_bad_request( + transport: str = "rest", request_type=replication.StopReplicationRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16540,7 +25473,9 @@ def test_create_active_directory_rest_bad_request( ) # send a 
request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16552,69 +25487,10 @@ def test_create_active_directory_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_active_directory(request) - - -def test_create_active_directory_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), - active_directory_id="active_directory_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_active_directory(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/activeDirectories" - % client.transport._host, - args[1], - ) - - -def test_create_active_directory_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_active_directory( - gcn_active_directory.CreateActiveDirectoryRequest(), - parent="parent_value", - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), - active_directory_id="active_directory_id_value", - ) + client.stop_replication(request) -def test_create_active_directory_rest_error(): +def test_stop_replication_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16623,117 +25499,20 @@ def test_create_active_directory_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_active_directory.UpdateActiveDirectoryRequest, + replication.ResumeReplicationRequest, dict, ], ) -def test_update_active_directory_rest(request_type): +def test_resume_replication_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "active_directory": { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" - } - } - request_init["active_directory"] = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "domain": "domain_value", - "site": "site_value", - "dns": "dns_value", - "net_bios_prefix": "net_bios_prefix_value", - "organizational_unit": "organizational_unit_value", - "aes_encryption": True, - "username": "username_value", - 
"password": "password_value", - "backup_operators": ["backup_operators_value1", "backup_operators_value2"], - "security_operators": [ - "security_operators_value1", - "security_operators_value2", - ], - "kdc_hostname": "kdc_hostname_value", - "kdc_ip": "kdc_ip_value", - "nfs_users_with_ldap": True, - "description": "description_value", - "ldap_signing": True, - "encrypt_dc_connections": True, - "labels": {}, - "state_details": "state_details_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_active_directory.UpdateActiveDirectoryRequest.meta.fields[ - "active_directory" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["active_directory"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["active_directory"][field])): - del request_init["active_directory"][field][i][subfield] - 
else: - del request_init["active_directory"][field][subfield] + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -16748,18 +25527,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_active_directory(request) + response = client.resume_replication(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_active_directory_rest_required_fields( - request_type=gcn_active_directory.UpdateActiveDirectoryRequest, +def test_resume_replication_rest_required_fields( + request_type=replication.ResumeReplicationRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16774,19 +25554,21 @@ def test_update_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_active_directory._get_unset_required_fields(jsonified_request) + ).resume_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_active_directory._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).resume_replication._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16807,7 +25589,7 @@ def test_update_active_directory_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -16820,32 +25602,24 @@ def test_update_active_directory_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_active_directory(request) + response = client.resume_replication(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_active_directory_rest_unset_required_fields(): +def test_resume_replication_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_active_directory._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "activeDirectory", - ) - ) - ) + unset_fields = transport.resume_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_active_directory_rest_interceptors(null_interceptor): +def test_resume_replication_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else 
transports.NetAppRestInterceptor(), @@ -16858,14 +25632,14 @@ def test_update_active_directory_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_active_directory" + transports.NetAppRestInterceptor, "post_resume_replication" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_active_directory" + transports.NetAppRestInterceptor, "pre_resume_replication" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_active_directory.UpdateActiveDirectoryRequest.pb( - gcn_active_directory.UpdateActiveDirectoryRequest() + pb_message = replication.ResumeReplicationRequest.pb( + replication.ResumeReplicationRequest() ) transcode.return_value = { "method": "post", @@ -16881,7 +25655,7 @@ def test_update_active_directory_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcn_active_directory.UpdateActiveDirectoryRequest() + request = replication.ResumeReplicationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16889,7 +25663,7 @@ def test_update_active_directory_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_active_directory( + client.resume_replication( request, metadata=[ ("key", "val"), @@ -16901,9 +25675,8 @@ def test_update_active_directory_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_active_directory_rest_bad_request( - transport: str = "rest", - request_type=gcn_active_directory.UpdateActiveDirectoryRequest, +def test_resume_replication_rest_bad_request( + transport: str = "rest", request_type=replication.ResumeReplicationRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16912,9 +25685,7 @@ def test_update_active_directory_rest_bad_request( # send a request that will satisfy 
transcoding request_init = { - "active_directory": { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" - } + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } request = request_type(**request_init) @@ -16927,71 +25698,10 @@ def test_update_active_directory_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_active_directory(request) - - -def test_update_active_directory_rest_flattened(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "active_directory": { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_active_directory(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{active_directory.name=projects/*/locations/*/activeDirectories/*}" - % client.transport._host, - args[1], - ) - - -def test_update_active_directory_rest_flattened_error(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_active_directory( - gcn_active_directory.UpdateActiveDirectoryRequest(), - active_directory=gcn_active_directory.ActiveDirectory(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + client.resume_replication(request) -def test_update_active_directory_rest_error(): +def test_resume_replication_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17000,11 +25710,11 @@ def test_update_active_directory_rest_error(): @pytest.mark.parametrize( "request_type", [ - active_directory.DeleteActiveDirectoryRequest, + replication.ReverseReplicationDirectionRequest, dict, ], ) -def test_delete_active_directory_rest(request_type): +def test_reverse_replication_direction_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17012,7 +25722,7 @@ def test_delete_active_directory_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } request = request_type(**request_init) @@ -17028,14 +25738,14 @@ def test_delete_active_directory_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_active_directory(request) + 
response = client.reverse_replication_direction(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_active_directory_rest_required_fields( - request_type=active_directory.DeleteActiveDirectoryRequest, +def test_reverse_replication_direction_rest_required_fields( + request_type=replication.ReverseReplicationDirectionRequest, ): transport_class = transports.NetAppRestTransport @@ -17055,7 +25765,7 @@ def test_delete_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_active_directory._get_unset_required_fields(jsonified_request) + ).reverse_replication_direction._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17064,7 +25774,7 @@ def test_delete_active_directory_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_active_directory._get_unset_required_fields(jsonified_request) + ).reverse_replication_direction._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17090,9 +25800,10 @@ def test_delete_active_directory_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -17102,24 +25813,26 @@ def test_delete_active_directory_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_active_directory(request) + response = client.reverse_replication_direction(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params 
= req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_active_directory_rest_unset_required_fields(): +def test_reverse_replication_direction_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_active_directory._get_unset_required_fields({}) + unset_fields = transport.reverse_replication_direction._get_unset_required_fields( + {} + ) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_active_directory_rest_interceptors(null_interceptor): +def test_reverse_replication_direction_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -17132,14 +25845,14 @@ def test_delete_active_directory_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_active_directory" + transports.NetAppRestInterceptor, "post_reverse_replication_direction" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_active_directory" + transports.NetAppRestInterceptor, "pre_reverse_replication_direction" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = active_directory.DeleteActiveDirectoryRequest.pb( - active_directory.DeleteActiveDirectoryRequest() + pb_message = replication.ReverseReplicationDirectionRequest.pb( + replication.ReverseReplicationDirectionRequest() ) transcode.return_value = { "method": "post", @@ -17155,7 +25868,7 @@ def test_delete_active_directory_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = active_directory.DeleteActiveDirectoryRequest() + request = 
replication.ReverseReplicationDirectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -17163,7 +25876,7 @@ def test_delete_active_directory_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_active_directory( + client.reverse_replication_direction( request, metadata=[ ("key", "val"), @@ -17175,8 +25888,8 @@ def test_delete_active_directory_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_active_directory_rest_bad_request( - transport: str = "rest", request_type=active_directory.DeleteActiveDirectoryRequest +def test_reverse_replication_direction_rest_bad_request( + transport: str = "rest", request_type=replication.ReverseReplicationDirectionRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17185,7 +25898,7 @@ def test_delete_active_directory_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" } request = request_type(**request_init) @@ -17198,119 +25911,132 @@ def test_delete_active_directory_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_active_directory(request) + client.reverse_replication_direction(request) -def test_delete_active_directory_rest_flattened(): +def test_reverse_replication_direction_rest_error(): client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/activeDirectories/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_active_directory(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/activeDirectories/*}" - % client.transport._host, - args[1], - ) - -def test_delete_active_directory_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + gcn_backup_vault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_active_directory( - active_directory.DeleteActiveDirectoryRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "description": "description_value", + "labels": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup_vault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_delete_active_directory_rest_error(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - kms.ListKmsConfigsRequest, - dict, - ], -) -def test_list_kms_configs_rest(request_type): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del request_init["backup_vault"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = kms.ListKmsConfigsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_kms_configs(request) + response = client.create_backup_vault(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListKmsConfigsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRequest): +def test_create_backup_vault_rest_required_fields( + request_type=gcn_backup_vault.CreateBackupVaultRequest, +): transport_class = transports.NetAppRestTransport request_init = {} request_init["parent"] = "" + request_init["backup_vault_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17322,33 +26048,32 @@ def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRe ) # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_kms_configs._get_unset_required_fields(jsonified_request) + ).create_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] 
== request_init["backup_vault_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_kms_configs._get_unset_required_fields(jsonified_request) + ).create_backup_vault._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("backup_vault_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17357,7 +26082,7 @@ def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = kms.ListKmsConfigsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17369,49 +26094,52 @@ def test_list_kms_configs_rest_required_fields(request_type=kms.ListKmsConfigsRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_kms_configs(request) + response = client.create_backup_vault(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_kms_configs_rest_unset_required_fields(): +def test_create_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_kms_configs._get_unset_required_fields({}) + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("backupVaultId",)) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "parent", + "backupVaultId", + "backupVault", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_kms_configs_rest_interceptors(null_interceptor): +def test_create_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if 
null_interceptor else transports.NetAppRestInterceptor(), @@ -17422,13 +26150,17 @@ def test_list_kms_configs_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_kms_configs" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_kms_configs" + transports.NetAppRestInterceptor, "pre_create_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.ListKmsConfigsRequest.pb(kms.ListKmsConfigsRequest()) + pb_message = gcn_backup_vault.CreateBackupVaultRequest.pb( + gcn_backup_vault.CreateBackupVaultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17439,19 +26171,19 @@ def test_list_kms_configs_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = kms.ListKmsConfigsResponse.to_json( - kms.ListKmsConfigsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = kms.ListKmsConfigsRequest() + request = gcn_backup_vault.CreateBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = kms.ListKmsConfigsResponse() + post.return_value = operations_pb2.Operation() - client.list_kms_configs( + client.create_backup_vault( request, metadata=[ ("key", "val"), @@ -17463,8 +26195,8 @@ def test_list_kms_configs_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_kms_configs_rest_bad_request( - transport: str = "rest", request_type=kms.ListKmsConfigsRequest +def test_create_backup_vault_rest_bad_request( + transport: str = "rest", 
request_type=gcn_backup_vault.CreateBackupVaultRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17484,10 +26216,10 @@ def test_list_kms_configs_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_kms_configs(request) + client.create_backup_vault(request) -def test_list_kms_configs_rest_flattened(): +def test_create_backup_vault_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17496,7 +26228,7 @@ def test_list_kms_configs_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = kms.ListKmsConfigsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -17504,227 +26236,105 @@ def test_list_kms_configs_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = kms.ListKmsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_kms_configs(**mock_args) + client.create_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, args[1], ) -def test_list_kms_configs_rest_flattened_error(transport: str = "rest"): +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_kms_configs( - kms.ListKmsConfigsRequest(), - parent="parent_value", - ) - - -def test_list_kms_configs_rest_pager(transport: str = "rest"): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - kms.ListKmsConfigsResponse( - kms_configs=[ - kms.KmsConfig(), - kms.KmsConfig(), - kms.KmsConfig(), - ], - next_page_token="abc", - ), - kms.ListKmsConfigsResponse( - kms_configs=[], - next_page_token="def", - ), - kms.ListKmsConfigsResponse( - kms_configs=[ - kms.KmsConfig(), - ], - next_page_token="ghi", - ), - kms.ListKmsConfigsResponse( - kms_configs=[ - kms.KmsConfig(), - kms.KmsConfig(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(kms.ListKmsConfigsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_kms_configs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, kms.KmsConfig) for i in results) - - pages = list(client.list_kms_configs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - kms.CreateKmsConfigRequest, - dict, - ], -) -def test_create_kms_config_rest(request_type): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["kms_config"] = { - "name": "name_value", - "crypto_key_name": "crypto_key_name_value", - "state": 1, - "state_details": "state_details_value", - "create_time": {"seconds": 751, "nanos": 543}, - "description": 
"description_value", - "labels": {}, - "instructions": "instructions_value", - "service_account": "service_account_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = kms.CreateKmsConfigRequest.meta.fields["kms_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_vault( + gcn_backup_vault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_create_backup_vault_rest_error(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["kms_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backup_vault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault_rest(request_type): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - 
subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["kms_config"][field])): - del request_init["kms_config"][field][i][subfield] - else: - del request_init["kms_config"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_vault.BackupVault( + name="name_value", + state=backup_vault.BackupVault.State.CREATING, + description="description_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_vault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_kms_config(request) + response = client.get_backup_vault(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, backup_vault.BackupVault) + assert response.name == "name_value" + assert response.state == backup_vault.BackupVault.State.CREATING + assert response.description == "description_value" -def test_create_kms_config_rest_required_fields( - request_type=kms.CreateKmsConfigRequest, +def test_get_backup_vault_rest_required_fields( + request_type=backup_vault.GetBackupVaultRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" - request_init["kms_config_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17736,32 +26346,24 @@ def test_create_kms_config_rest_required_fields( ) # verify fields with default values are dropped - assert "kmsConfigId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_kms_config._get_unset_required_fields(jsonified_request) + ).get_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "kmsConfigId" in jsonified_request - assert jsonified_request["kmsConfigId"] == request_init["kms_config_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["kmsConfigId"] = "kms_config_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_kms_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("kms_config_id",)) + ).get_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "kmsConfigId" in jsonified_request - assert jsonified_request["kmsConfigId"] == "kms_config_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17770,7 +26372,7 @@ def test_create_kms_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_vault.BackupVault() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17782,52 +26384,39 @@ def test_create_kms_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_vault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_kms_config(request) + response = client.get_backup_vault(request) - expected_params = [ - ( - "kmsConfigId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert 
expected_params == actual_params -def test_create_kms_config_rest_unset_required_fields(): +def test_get_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_kms_config._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("kmsConfigId",)) - & set( - ( - "parent", - "kmsConfigId", - "kmsConfig", - ) - ) - ) + unset_fields = transport.get_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_kms_config_rest_interceptors(null_interceptor): +def test_get_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -17838,15 +26427,15 @@ def test_create_kms_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_kms_config" + transports.NetAppRestInterceptor, "post_get_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_kms_config" + transports.NetAppRestInterceptor, "pre_get_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.CreateKmsConfigRequest.pb(kms.CreateKmsConfigRequest()) + pb_message = backup_vault.GetBackupVaultRequest.pb( + backup_vault.GetBackupVaultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -17857,19 +26446,19 @@ def test_create_kms_config_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = backup_vault.BackupVault.to_json( + backup_vault.BackupVault() ) - request = kms.CreateKmsConfigRequest() + request = backup_vault.GetBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = backup_vault.BackupVault() - client.create_kms_config( + client.get_backup_vault( request, metadata=[ ("key", "val"), @@ -17881,8 +26470,8 @@ def test_create_kms_config_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_kms_config_rest_bad_request( - transport: str = "rest", request_type=kms.CreateKmsConfigRequest +def test_get_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backup_vault.GetBackupVaultRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17890,7 +26479,7 @@ def test_create_kms_config_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17902,10 +26491,10 @@ def test_create_kms_config_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_kms_config(request) + client.get_backup_vault(request) -def test_create_kms_config_rest_flattened(): +def test_get_backup_vault_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17914,39 +26503,42 @@ def test_create_kms_config_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_vault.BackupVault() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - kms_config=kms.KmsConfig(name="name_value"), - kms_config_id="kms_config_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_vault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_kms_config(**mock_args) + client.get_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/kmsConfigs" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, args[1], ) -def test_create_kms_config_rest_flattened_error(transport: str = "rest"): +def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17955,15 +26547,13 @@ def test_create_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_kms_config( - kms.CreateKmsConfigRequest(), - parent="parent_value", - kms_config=kms.KmsConfig(name="name_value"), - kms_config_id="kms_config_id_value", + client.get_backup_vault( + backup_vault.GetBackupVaultRequest(), + name="name_value", ) -def test_create_kms_config_rest_error(): +def test_get_backup_vault_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17972,60 +26562,52 @@ def test_create_kms_config_rest_error(): @pytest.mark.parametrize( "request_type", [ - kms.GetKmsConfigRequest, + backup_vault.ListBackupVaultsRequest, dict, ], ) -def test_get_kms_config_rest(request_type): +def test_list_backup_vaults_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = kms.KmsConfig( - name="name_value", - crypto_key_name="crypto_key_name_value", - state=kms.KmsConfig.State.READY, - state_details="state_details_value", - description="description_value", - instructions="instructions_value", - service_account="service_account_value", + return_value = backup_vault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = kms.KmsConfig.pb(return_value) + return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_kms_config(request) + response = client.list_backup_vaults(request) # Establish that the response is the type that we expect. - assert isinstance(response, kms.KmsConfig) - assert response.name == "name_value" - assert response.crypto_key_name == "crypto_key_name_value" - assert response.state == kms.KmsConfig.State.READY - assert response.state_details == "state_details_value" - assert response.description == "description_value" - assert response.instructions == "instructions_value" - assert response.service_account == "service_account_value" + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigRequest): +def test_list_backup_vaults_rest_required_fields( + request_type=backup_vault.ListBackupVaultsRequest, +): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) 
jsonified_request = json.loads( @@ -18040,21 +26622,30 @@ def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_kms_config._get_unset_required_fields(jsonified_request) + ).list_backup_vaults._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_kms_config._get_unset_required_fields(jsonified_request) + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18063,7 +26654,7 @@ def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = kms.KmsConfig() + return_value = backup_vault.ListBackupVaultsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18084,30 +26675,40 @@ def test_get_kms_config_rest_required_fields(request_type=kms.GetKmsConfigReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = kms.KmsConfig.pb(return_value) + return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_kms_config(request) + response = client.list_backup_vaults(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_kms_config_rest_unset_required_fields(): +def test_list_backup_vaults_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_kms_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_kms_config_rest_interceptors(null_interceptor): +def test_list_backup_vaults_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -18118,13 +26719,15 @@ def test_get_kms_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_kms_config" + transports.NetAppRestInterceptor, 
"post_list_backup_vaults" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_kms_config" + transports.NetAppRestInterceptor, "pre_list_backup_vaults" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.GetKmsConfigRequest.pb(kms.GetKmsConfigRequest()) + pb_message = backup_vault.ListBackupVaultsRequest.pb( + backup_vault.ListBackupVaultsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18135,17 +26738,19 @@ def test_get_kms_config_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = kms.KmsConfig.to_json(kms.KmsConfig()) + req.return_value._content = backup_vault.ListBackupVaultsResponse.to_json( + backup_vault.ListBackupVaultsResponse() + ) - request = kms.GetKmsConfigRequest() + request = backup_vault.ListBackupVaultsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = kms.KmsConfig() + post.return_value = backup_vault.ListBackupVaultsResponse() - client.get_kms_config( + client.list_backup_vaults( request, metadata=[ ("key", "val"), @@ -18157,8 +26762,8 @@ def test_get_kms_config_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_kms_config_rest_bad_request( - transport: str = "rest", request_type=kms.GetKmsConfigRequest +def test_list_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backup_vault.ListBackupVaultsRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18166,7 +26771,7 @@ def test_get_kms_config_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and 
fake a BadRequest error. @@ -18178,10 +26783,10 @@ def test_get_kms_config_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_kms_config(request) + client.list_backup_vaults(request) -def test_get_kms_config_rest_flattened(): +def test_list_backup_vaults_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18190,16 +26795,14 @@ def test_get_kms_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = kms.KmsConfig() + return_value = backup_vault.ListBackupVaultsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -18207,52 +26810,110 @@ def test_get_kms_config_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = kms.KmsConfig.pb(return_value) + return_value = backup_vault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_kms_config(**mock_args) + client.list_backup_vaults(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, args[1], ) -def test_get_kms_config_rest_flattened_error(transport: str = "rest"): +def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backup_vault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_kms_config( - kms.GetKmsConfigRequest(), - name="name_value", + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + next_page_token="abc", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + ], + next_page_token="ghi", + ), + backup_vault.ListBackupVaultsResponse( + backup_vaults=[ + backup_vault.BackupVault(), + backup_vault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backup_vault.ListBackupVaultsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_get_kms_config_rest_error(): - client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pager = client.list_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup_vault.BackupVault) for i in results) + + pages = list(client.list_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - kms.UpdateKmsConfigRequest, + gcn_backup_vault.UpdateBackupVaultRequest, dict, ], ) -def test_update_kms_config_rest(request_type): +def test_update_backup_vault_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ 
-18260,25 +26921,23 @@ def test_update_kms_config_rest(request_type): # send a request that will satisfy transcoding request_init = { - "kms_config": {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } } - request_init["kms_config"] = { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3", - "crypto_key_name": "crypto_key_name_value", + request_init["backup_vault"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3", "state": 1, - "state_details": "state_details_value", "create_time": {"seconds": 751, "nanos": 543}, "description": "description_value", "labels": {}, - "instructions": "instructions_value", - "service_account": "service_account_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = kms.UpdateKmsConfigRequest.meta.fields["kms_config"] + test_field = gcn_backup_vault.UpdateBackupVaultRequest.meta.fields["backup_vault"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18306,7 +26965,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["kms_config"].items(): # pragma: NO COVER + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18336,10 +26995,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, 
len(request_init["kms_config"][field])): - del request_init["kms_config"][field][i][subfield] + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] else: - del request_init["kms_config"][field][subfield] + del request_init["backup_vault"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18354,14 +27013,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_kms_config(request) + response = client.update_backup_vault(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_kms_config_rest_required_fields( - request_type=kms.UpdateKmsConfigRequest, +def test_update_backup_vault_rest_required_fields( + request_type=gcn_backup_vault.UpdateBackupVaultRequest, ): transport_class = transports.NetAppRestTransport @@ -18380,14 +27039,14 @@ def test_update_kms_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_kms_config._get_unset_required_fields(jsonified_request) + ).update_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_kms_config._get_unset_required_fields(jsonified_request) + ).update_backup_vault._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -18426,32 +27085,32 @@ def test_update_kms_config_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_kms_config(request) + response = client.update_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_kms_config_rest_unset_required_fields(): +def test_update_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_kms_config._get_unset_required_fields({}) + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == ( set(("updateMask",)) & set( ( "updateMask", - "kmsConfig", + "backupVault", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_kms_config_rest_interceptors(null_interceptor): +def test_update_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -18464,13 +27123,15 @@ def test_update_kms_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_kms_config" + transports.NetAppRestInterceptor, "post_update_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_kms_config" + transports.NetAppRestInterceptor, "pre_update_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.UpdateKmsConfigRequest.pb(kms.UpdateKmsConfigRequest()) + pb_message = 
gcn_backup_vault.UpdateBackupVaultRequest.pb( + gcn_backup_vault.UpdateBackupVaultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18485,7 +27146,7 @@ def test_update_kms_config_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = kms.UpdateKmsConfigRequest() + request = gcn_backup_vault.UpdateBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18493,7 +27154,7 @@ def test_update_kms_config_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_kms_config( + client.update_backup_vault( request, metadata=[ ("key", "val"), @@ -18505,8 +27166,8 @@ def test_update_kms_config_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_kms_config_rest_bad_request( - transport: str = "rest", request_type=kms.UpdateKmsConfigRequest +def test_update_backup_vault_rest_bad_request( + transport: str = "rest", request_type=gcn_backup_vault.UpdateBackupVaultRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18515,7 +27176,9 @@ def test_update_kms_config_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "kms_config": {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } } request = request_type(**request_init) @@ -18528,10 +27191,10 @@ def test_update_kms_config_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_kms_config(request) + client.update_backup_vault(request) -def test_update_kms_config_rest_flattened(): +def test_update_backup_vault_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18544,14 +27207,14 @@ def test_update_kms_config_rest_flattened(): # get arguments that 
satisfy an http rule for this method sample_request = { - "kms_config": { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" } } # get truthy value for each flattened field mock_args = dict( - kms_config=kms.KmsConfig(name="name_value"), + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -18563,20 +27226,20 @@ def test_update_kms_config_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_kms_config(**mock_args) + client.update_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{kms_config.name=projects/*/locations/*/kmsConfigs/*}" + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1], ) -def test_update_kms_config_rest_flattened_error(transport: str = "rest"): +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18585,14 +27248,14 @@ def test_update_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_kms_config( - kms.UpdateKmsConfigRequest(), - kms_config=kms.KmsConfig(name="name_value"), + client.update_backup_vault( + gcn_backup_vault.UpdateBackupVaultRequest(), + backup_vault=gcn_backup_vault.BackupVault(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_kms_config_rest_error(): +def test_update_backup_vault_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18601,18 +27264,18 @@ def test_update_kms_config_rest_error(): @pytest.mark.parametrize( "request_type", [ - kms.EncryptVolumesRequest, + backup_vault.DeleteBackupVaultRequest, dict, ], ) -def test_encrypt_volumes_rest(request_type): +def test_delete_backup_vault_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18627,13 +27290,15 @@ def test_encrypt_volumes_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.encrypt_volumes(request) + response = client.delete_backup_vault(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesRequest): +def test_delete_backup_vault_rest_required_fields( + request_type=backup_vault.DeleteBackupVaultRequest, +): transport_class = transports.NetAppRestTransport request_init = {} @@ -18652,7 +27317,7 @@ def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).encrypt_volumes._get_unset_required_fields(jsonified_request) + ).delete_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18661,7 +27326,7 @@ def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).encrypt_volumes._get_unset_required_fields(jsonified_request) + ).delete_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18687,10 +27352,9 @@ def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -18700,24 +27364,24 @@ def test_encrypt_volumes_rest_required_fields(request_type=kms.EncryptVolumesReq response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.encrypt_volumes(request) + response = client.delete_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_encrypt_volumes_rest_unset_required_fields(): +def test_delete_backup_vault_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.encrypt_volumes._get_unset_required_fields({}) + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_encrypt_volumes_rest_interceptors(null_interceptor): +def test_delete_backup_vault_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -18730,13 +27394,15 @@ def test_encrypt_volumes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_encrypt_volumes" + transports.NetAppRestInterceptor, "post_delete_backup_vault" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_encrypt_volumes" + transports.NetAppRestInterceptor, "pre_delete_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.EncryptVolumesRequest.pb(kms.EncryptVolumesRequest()) + pb_message = backup_vault.DeleteBackupVaultRequest.pb( + backup_vault.DeleteBackupVaultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18751,7 +27417,7 @@ def test_encrypt_volumes_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = kms.EncryptVolumesRequest() + request = backup_vault.DeleteBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18759,7 +27425,7 @@ def test_encrypt_volumes_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.encrypt_volumes( + 
client.delete_backup_vault( request, metadata=[ ("key", "val"), @@ -18771,8 +27437,8 @@ def test_encrypt_volumes_rest_interceptors(null_interceptor): post.assert_called_once() -def test_encrypt_volumes_rest_bad_request( - transport: str = "rest", request_type=kms.EncryptVolumesRequest +def test_delete_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backup_vault.DeleteBackupVaultRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18780,7 +27446,7 @@ def test_encrypt_volumes_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18792,10 +27458,67 @@ def test_encrypt_volumes_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.encrypt_volumes(request) + client.delete_backup_vault(request) -def test_encrypt_volumes_rest_error(): +def test_delete_backup_vault_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_vault( + backup_vault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +def test_delete_backup_vault_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18804,54 +27527,125 @@ def test_encrypt_volumes_rest_error(): @pytest.mark.parametrize( "request_type", [ - kms.VerifyKmsConfigRequest, + gcn_backup.CreateBackupRequest, dict, ], ) -def test_verify_kms_config_rest(request_type): +def test_create_backup_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init["backup"] = { + "name": "name_value", + "state": 1, + "description": "description_value", + "volume_usage_bytes": 1938, + "backup_type": 1, + "source_volume": "source_volume_value", + "source_snapshot": "source_snapshot_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "chain_storage_bytes": 2013, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del 
request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = kms.VerifyKmsConfigResponse( - healthy=True, - health_error="health_error_value", - instructions="instructions_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = kms.VerifyKmsConfigResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.verify_kms_config(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, kms.VerifyKmsConfigResponse) - assert response.healthy is True - assert response.health_error == "health_error_value" - assert response.instructions == "instructions_value" + req.return_value = response_value + response = client.create_backup(request) + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" -def test_verify_kms_config_rest_required_fields( - request_type=kms.VerifyKmsConfigRequest, + +def test_create_backup_rest_required_fields( + request_type=gcn_backup.CreateBackupRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["backup_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18863,24 +27657,32 @@ def test_verify_kms_config_rest_required_fields( ) # verify fields with default values are dropped + assert "backupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).verify_kms_config._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).verify_kms_config._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("backup_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18889,7 +27691,7 @@ def test_verify_kms_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = kms.VerifyKmsConfigResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18909,32 +27711,44 @@ def test_verify_kms_config_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = kms.VerifyKmsConfigResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.verify_kms_config(request) + response = client.create_backup(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_verify_kms_config_rest_unset_required_fields(): +def test_create_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.verify_kms_config._get_unset_required_fields({}) - assert 
set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("backupId",)) + & set( + ( + "parent", + "backupId", + "backup", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_verify_kms_config_rest_interceptors(null_interceptor): +def test_create_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -18945,13 +27759,15 @@ def test_verify_kms_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_verify_kms_config" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_create_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_verify_kms_config" + transports.NetAppRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.VerifyKmsConfigRequest.pb(kms.VerifyKmsConfigRequest()) + pb_message = gcn_backup.CreateBackupRequest.pb(gcn_backup.CreateBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18962,19 +27778,19 @@ def test_verify_kms_config_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = kms.VerifyKmsConfigResponse.to_json( - kms.VerifyKmsConfigResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = kms.VerifyKmsConfigRequest() + request = gcn_backup.CreateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
kms.VerifyKmsConfigResponse() + post.return_value = operations_pb2.Operation() - client.verify_kms_config( + client.create_backup( request, metadata=[ ("key", "val"), @@ -18986,8 +27802,8 @@ def test_verify_kms_config_rest_interceptors(null_interceptor): post.assert_called_once() -def test_verify_kms_config_rest_bad_request( - transport: str = "rest", request_type=kms.VerifyKmsConfigRequest +def test_create_backup_rest_bad_request( + transport: str = "rest", request_type=gcn_backup.CreateBackupRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18995,7 +27811,7 @@ def test_verify_kms_config_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19007,10 +27823,71 @@ def test_verify_kms_config_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.verify_kms_config(request) + client.create_backup(request) -def test_verify_kms_config_rest_error(): +def test_create_backup_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_create_backup_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup( + gcn_backup.CreateBackupRequest(), + parent="parent_value", + backup=gcn_backup.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +def test_create_backup_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19019,41 +27896,60 @@ def test_verify_kms_config_rest_error(): @pytest.mark.parametrize( "request_type", [ - kms.DeleteKmsConfigRequest, + backup.GetBackupRequest, dict, ], ) -def test_delete_kms_config_rest(request_type): +def test_get_backup_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.Backup( + name="name_value", + state=backup.Backup.State.CREATING, + description="description_value", + volume_usage_bytes=1938, + backup_type=backup.Backup.Type.MANUAL, + source_volume="source_volume_value", + source_snapshot="source_snapshot_value", + chain_storage_bytes=2013, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_kms_config(request) + response = client.get_backup(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, backup.Backup) + assert response.name == "name_value" + assert response.state == backup.Backup.State.CREATING + assert response.description == "description_value" + assert response.volume_usage_bytes == 1938 + assert response.backup_type == backup.Backup.Type.MANUAL + assert response.source_volume == "source_volume_value" + assert response.source_snapshot == "source_snapshot_value" + assert response.chain_storage_bytes == 2013 -def test_delete_kms_config_rest_required_fields( - request_type=kms.DeleteKmsConfigRequest, -): +def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -19072,7 +27968,7 @@ def test_delete_kms_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_kms_config._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are 
now present @@ -19081,7 +27977,7 @@ def test_delete_kms_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_kms_config._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19095,7 +27991,7 @@ def test_delete_kms_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.Backup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19107,36 +28003,39 @@ def test_delete_kms_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_kms_config(request) + response = client.get_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_kms_config_rest_unset_required_fields(): +def test_get_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_kms_config._get_unset_required_fields({}) + unset_fields = 
transport.get_backup._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_kms_config_rest_interceptors(null_interceptor): +def test_get_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -19147,15 +28046,13 @@ def test_delete_kms_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_kms_config" + transports.NetAppRestInterceptor, "post_get_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_kms_config" + transports.NetAppRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = kms.DeleteKmsConfigRequest.pb(kms.DeleteKmsConfigRequest()) + pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19166,19 +28063,17 @@ def test_delete_kms_config_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = backup.Backup.to_json(backup.Backup()) - request = kms.DeleteKmsConfigRequest() + request = backup.GetBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = backup.Backup() - client.delete_kms_config( + client.get_backup( request, metadata=[ ("key", "val"), @@ -19190,8 +28085,8 @@ def 
test_delete_kms_config_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_kms_config_rest_bad_request( - transport: str = "rest", request_type=kms.DeleteKmsConfigRequest +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=backup.GetBackupRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19199,7 +28094,9 @@ def test_delete_kms_config_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/kmsConfigs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19211,10 +28108,10 @@ def test_delete_kms_config_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_kms_config(request) + client.get_backup(request) -def test_delete_kms_config_rest_flattened(): +def test_get_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19223,11 +28120,11 @@ def test_delete_kms_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.Backup() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/kmsConfigs/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } # get truthy value for each flattened field @@ -19239,23 +28136,26 @@ def test_delete_kms_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_kms_config(**mock_args) + client.get_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/kmsConfigs/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}" + % client.transport._host, args[1], ) -def test_delete_kms_config_rest_flattened_error(transport: str = "rest"): +def test_get_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19264,13 +28164,13 @@ def test_delete_kms_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_kms_config( - kms.DeleteKmsConfigRequest(), + client.get_backup( + backup.GetBackupRequest(), name="name_value", ) -def test_delete_kms_config_rest_error(): +def test_get_backup_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19279,24 +28179,24 @@ def test_delete_kms_config_rest_error(): @pytest.mark.parametrize( "request_type", [ - replication.ListReplicationsRequest, + backup.ListBackupsRequest, dict, ], ) -def test_list_replications_rest(request_type): +def test_list_backups_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = replication.ListReplicationsResponse( + return_value = backup.ListBackupsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -19305,22 +28205,20 @@ def test_list_replications_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = replication.ListReplicationsResponse.pb(return_value) + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_replications(request) + response = client.list_backups(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReplicationsPager) + assert isinstance(response, pagers.ListBackupsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_replications_rest_required_fields( - request_type=replication.ListReplicationsRequest, -): +def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -19339,7 +28237,7 @@ def test_list_replications_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_replications._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19348,7 +28246,7 @@ def test_list_replications_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_replications._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -19371,7 +28269,7 @@ def test_list_replications_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = replication.ListReplicationsResponse() + return_value = backup.ListBackupsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19392,25 +28290,25 @@ def test_list_replications_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = replication.ListReplicationsResponse.pb(return_value) + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_replications(request) + response = client.list_backups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_replications_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_replications._get_unset_required_fields({}) + unset_fields = transport.list_backups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -19425,7 +28323,7 @@ def test_list_replications_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_replications_rest_interceptors(null_interceptor): +def test_list_backups_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -19436,15 +28334,13 @@ def test_list_replications_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_list_replications" + transports.NetAppRestInterceptor, "post_list_backups" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_list_replications" + 
transports.NetAppRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = replication.ListReplicationsRequest.pb( - replication.ListReplicationsRequest() - ) + pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19455,19 +28351,19 @@ def test_list_replications_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = replication.ListReplicationsResponse.to_json( - replication.ListReplicationsResponse() + req.return_value._content = backup.ListBackupsResponse.to_json( + backup.ListBackupsResponse() ) - request = replication.ListReplicationsRequest() + request = backup.ListBackupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = replication.ListReplicationsResponse() + post.return_value = backup.ListBackupsResponse() - client.list_replications( + client.list_backups( request, metadata=[ ("key", "val"), @@ -19479,8 +28375,8 @@ def test_list_replications_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_replications_rest_bad_request( - transport: str = "rest", request_type=replication.ListReplicationsRequest +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=backup.ListBackupsRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19488,7 +28384,7 @@ def test_list_replications_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19500,10 +28396,10 @@ def test_list_replications_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_replications(request) + client.list_backups(request) -def test_list_replications_rest_flattened(): +def test_list_backups_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19512,11 +28408,11 @@ def test_list_replications_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = replication.ListReplicationsResponse() + return_value = backup.ListBackupsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" } # get truthy value for each flattened field @@ -19529,25 +28425,25 @@ def test_list_replications_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = replication.ListReplicationsResponse.pb(return_value) + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_replications(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/backups" % client.transport._host, args[1], ) -def test_list_replications_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19556,13 +28452,13 @@ def test_list_replications_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_replications( - replication.ListReplicationsRequest(), + client.list_backups( + backup.ListBackupsRequest(), parent="parent_value", ) -def test_list_replications_rest_pager(transport: str = "rest"): +def test_list_backups_rest_pager(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19574,28 +28470,28 @@ def test_list_replications_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - replication.ListReplicationsResponse( - replications=[ - replication.Replication(), - replication.Replication(), - replication.Replication(), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), ], next_page_token="abc", ), - replication.ListReplicationsResponse( - replications=[], + backup.ListBackupsResponse( + backups=[], next_page_token="def", ), - replication.ListReplicationsResponse( - replications=[ - replication.Replication(), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), ], next_page_token="ghi", ), - replication.ListReplicationsResponse( - replications=[ - replication.Replication(), - replication.Replication(), + 
backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), ], ), ) @@ -19603,9 +28499,7 @@ def test_list_replications_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple( - replication.ListReplicationsResponse.to_json(x) for x in response - ) + response = tuple(backup.ListBackupsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -19613,16 +28507,16 @@ def test_list_replications_rest_pager(transport: str = "rest"): req.side_effect = return_values sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" } - pager = client.list_replications(request=sample_request) + pager = client.list_backups(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, replication.Replication) for i in results) + assert all(isinstance(i, backup.Backup) for i in results) - pages = list(client.list_replications(request=sample_request).pages) + pages = list(client.list_backups(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -19630,69 +28524,41 @@ def test_list_replications_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - replication.GetReplicationRequest, + backup.DeleteBackupRequest, dict, ], ) -def test_get_replication_rest(request_type): +def test_delete_backup_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } - request = request_type(**request_init) - - # 
Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = replication.Replication( - name="name_value", - state=replication.Replication.State.CREATING, - state_details="state_details_value", - role=replication.Replication.ReplicationRole.SOURCE, - replication_schedule=replication.Replication.ReplicationSchedule.EVERY_10_MINUTES, - mirror_state=replication.Replication.MirrorState.PREPARING, - healthy=True, - destination_volume="destination_volume_value", - description="description_value", - source_volume="source_volume_value", - ) + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_replication(request) + response = client.delete_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, replication.Replication) - assert response.name == "name_value" - assert response.state == replication.Replication.State.CREATING - assert response.state_details == "state_details_value" - assert response.role == replication.Replication.ReplicationRole.SOURCE - assert ( - response.replication_schedule - == replication.Replication.ReplicationSchedule.EVERY_10_MINUTES - ) - assert response.mirror_state == replication.Replication.MirrorState.PREPARING - assert response.healthy is True - assert response.destination_volume == "destination_volume_value" - assert response.description == "description_value" - assert response.source_volume == "source_volume_value" + assert response.operation.name == "operations/spam" -def test_get_replication_rest_required_fields( - request_type=replication.GetReplicationRequest, -): +def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): transport_class = transports.NetAppRestTransport request_init = {} @@ -19711,7 +28577,7 @@ def test_get_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_replication._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19720,7 +28586,7 @@ def test_get_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_replication._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19734,7 +28600,7 @@ def test_get_replication_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = replication.Replication() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19746,39 +28612,36 @@ def test_get_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_replication(request) + response = client.delete_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_replication_rest_unset_required_fields(): +def test_delete_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_replication._get_unset_required_fields({}) + unset_fields = transport.delete_backup._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_replication_rest_interceptors(null_interceptor): +def test_delete_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -19789,15 +28652,15 @@ def test_get_replication_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.NetAppRestInterceptor, "post_get_replication" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetAppRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_get_replication" + transports.NetAppRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = replication.GetReplicationRequest.pb( - replication.GetReplicationRequest() - ) + pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19808,19 +28671,19 @@ def test_get_replication_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = replication.Replication.to_json( - replication.Replication() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = replication.GetReplicationRequest() + request = backup.DeleteBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = replication.Replication() + post.return_value = operations_pb2.Operation() - client.get_replication( + client.delete_backup( request, metadata=[ ("key", "val"), @@ -19832,8 +28695,8 @@ def test_get_replication_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_replication_rest_bad_request( - transport: str = "rest", request_type=replication.GetReplicationRequest +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=backup.DeleteBackupRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19842,7 +28705,7 @@ def test_get_replication_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } request = request_type(**request_init) @@ -19855,10 +28718,10 @@ def test_get_replication_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_replication(request) + client.delete_backup(request) -def test_get_replication_rest_flattened(): +def test_delete_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19867,11 +28730,11 @@ def test_get_replication_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = replication.Replication() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" } # get truthy value for each flattened field @@ -19883,26 +28746,24 @@ def test_get_replication_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = replication.Replication.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_replication(**mock_args) + client.delete_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" + "%s/v1/{name=projects/*/locations/*/backupVaults/*/backups/*}" % client.transport._host, args[1], ) -def test_get_replication_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19911,13 +28772,13 @@ def test_get_replication_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_replication( - replication.GetReplicationRequest(), + client.delete_backup( + backup.DeleteBackupRequest(), name="name_value", ) -def test_get_replication_rest_error(): +def test_delete_backup_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19926,54 +28787,40 @@ def test_get_replication_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_replication.CreateReplicationRequest, + gcn_backup.UpdateBackupRequest, dict, ], ) -def test_create_replication_rest(request_type): +def test_update_backup_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} - request_init["replication"] = { - "name": "name_value", + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4", "state": 1, - "state_details": "state_details_value", - "role": 1, - "replication_schedule": 1, - "mirror_state": 1, - "healthy": True, - "create_time": 
{"seconds": 751, "nanos": 543}, - "destination_volume": "destination_volume_value", - "transfer_stats": { - "transfer_bytes": 1515, - "total_transfer_duration": {"seconds": 751, "nanos": 543}, - "last_transfer_bytes": 2046, - "last_transfer_duration": {}, - "lag_duration": {}, - "update_time": {}, - "last_transfer_end_time": {}, - "last_transfer_error": "last_transfer_error_value", - }, - "labels": {}, "description": "description_value", - "destination_volume_parameters": { - "storage_pool": "storage_pool_value", - "volume_id": "volume_id_value", - "share_name": "share_name_value", - "description": "description_value", - }, + "volume_usage_bytes": 1938, + "backup_type": 1, "source_volume": "source_volume_value", + "source_snapshot": "source_snapshot_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "chain_storage_bytes": 2013, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcn_replication.CreateReplicationRequest.meta.fields["replication"] + test_field = gcn_backup.UpdateBackupRequest.meta.fields["backup"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20001,7 +28848,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["replication"].items(): # pragma: NO COVER + for field, value in request_init["backup"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20031,10 +28878,10 @@ def get_message_fields(field): subfield = 
subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["replication"][field])): - del request_init["replication"][field][i][subfield] + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] else: - del request_init["replication"][field][subfield] + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20049,20 +28896,18 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_replication(request) + response = client.update_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_replication_rest_required_fields( - request_type=gcn_replication.CreateReplicationRequest, +def test_update_backup_rest_required_fields( + request_type=gcn_backup.UpdateBackupRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["parent"] = "" - request_init["replication_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20074,32 +28919,22 @@ def test_create_replication_rest_required_fields( ) # verify fields with default values are dropped - assert "replicationId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_replication._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "replicationId" in jsonified_request - assert jsonified_request["replicationId"] == request_init["replication_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["replicationId"] 
= "replication_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_replication._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("replication_id",)) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "replicationId" in jsonified_request - assert jsonified_request["replicationId"] == "replication_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20120,7 +28955,7 @@ def test_create_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -20133,39 +28968,32 @@ def test_create_replication_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_replication(request) + response = client.update_backup(request) - expected_params = [ - ( - "replicationId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_replication_rest_unset_required_fields(): +def test_update_backup_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_replication._get_unset_required_fields({}) + unset_fields = transport.update_backup._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("replicationId",)) + 
set(("updateMask",)) & set( ( - "parent", - "replication", - "replicationId", + "updateMask", + "backup", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_replication_rest_interceptors(null_interceptor): +def test_update_backup_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -20178,15 +29006,13 @@ def test_create_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_create_replication" + transports.NetAppRestInterceptor, "post_update_backup" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_create_replication" + transports.NetAppRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_replication.CreateReplicationRequest.pb( - gcn_replication.CreateReplicationRequest() - ) + pb_message = gcn_backup.UpdateBackupRequest.pb(gcn_backup.UpdateBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20201,7 +29027,7 @@ def test_create_replication_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcn_replication.CreateReplicationRequest() + request = gcn_backup.UpdateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20209,7 +29035,7 @@ def test_create_replication_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_replication( + client.update_backup( request, metadata=[ ("key", "val"), @@ -20221,8 +29047,8 @@ def test_create_replication_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_replication_rest_bad_request( - transport: str = "rest", 
request_type=gcn_replication.CreateReplicationRequest +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=gcn_backup.UpdateBackupRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20230,7 +29056,11 @@ def test_create_replication_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20242,10 +29072,10 @@ def test_create_replication_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_replication(request) + client.update_backup(request) -def test_create_replication_rest_flattened(): +def test_update_backup_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20258,14 +29088,15 @@ def test_create_replication_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/backups/sample4" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - replication=gcn_replication.Replication(name="name_value"), - replication_id="replication_id_value", + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -20276,20 +29107,20 @@ def test_create_replication_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_replication(**mock_args) + 
client.update_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/volumes/*}/replications" + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/backups/*}" % client.transport._host, args[1], ) -def test_create_replication_rest_flattened_error(transport: str = "rest"): +def test_update_backup_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20298,15 +29129,14 @@ def test_create_replication_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_replication( - gcn_replication.CreateReplicationRequest(), - parent="parent_value", - replication=gcn_replication.Replication(name="name_value"), - replication_id="replication_id_value", + client.update_backup( + gcn_backup.UpdateBackupRequest(), + backup=gcn_backup.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_replication_rest_error(): +def test_update_backup_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20315,20 +29145,99 @@ def test_create_replication_rest_error(): @pytest.mark.parametrize( "request_type", [ - replication.DeleteReplicationRequest, + gcn_backup_policy.CreateBackupPolicyRequest, dict, ], ) -def test_delete_replication_rest(request_type): +def test_create_backup_policy_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request_init["backup_policy"] = { + "name": "name_value", + "daily_backup_limit": 1894, + "weekly_backup_limit": 2020, + "monthly_backup_limit": 2142, + "description": "description_value", + "enabled": True, + "assigned_volume_count": 2253, + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "state": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup_policy.CreateBackupPolicyRequest.meta.fields[ + "backup_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): 
+ is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_policy"][field])): + del request_init["backup_policy"][field][i][subfield] + else: + del request_init["backup_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20343,19 +29252,20 @@ def test_delete_replication_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_replication(request) + response = client.create_backup_policy(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_delete_replication_rest_required_fields( - request_type=replication.DeleteReplicationRequest, +def test_create_backup_policy_rest_required_fields( + request_type=gcn_backup_policy.CreateBackupPolicyRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["backup_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20367,24 +29277,32 @@ def test_delete_replication_rest_required_fields( ) # verify fields with default values are dropped + assert "backupPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_replication._get_unset_required_fields(jsonified_request) + ).create_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupPolicyId" in jsonified_request + assert jsonified_request["backupPolicyId"] == request_init["backup_policy_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPolicyId"] = "backup_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_replication._get_unset_required_fields(jsonified_request) + ).create_backup_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("backup_policy_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPolicyId" in jsonified_request + assert jsonified_request["backupPolicyId"] == "backup_policy_id_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20405,9 +29323,10 @@ def test_delete_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -20417,24 +29336,39 @@ def test_delete_replication_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_replication(request) + response = client.create_backup_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_replication_rest_unset_required_fields(): +def test_create_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_replication._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_backup_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("backupPolicyId",)) + & set( + ( + "parent", + "backupPolicy", + "backupPolicyId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", 
[True, False]) -def test_delete_replication_rest_interceptors(null_interceptor): +def test_create_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -20447,14 +29381,14 @@ def test_delete_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_delete_replication" + transports.NetAppRestInterceptor, "post_create_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_delete_replication" + transports.NetAppRestInterceptor, "pre_create_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = replication.DeleteReplicationRequest.pb( - replication.DeleteReplicationRequest() + pb_message = gcn_backup_policy.CreateBackupPolicyRequest.pb( + gcn_backup_policy.CreateBackupPolicyRequest() ) transcode.return_value = { "method": "post", @@ -20470,7 +29404,7 @@ def test_delete_replication_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = replication.DeleteReplicationRequest() + request = gcn_backup_policy.CreateBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20478,7 +29412,7 @@ def test_delete_replication_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_replication( + client.create_backup_policy( request, metadata=[ ("key", "val"), @@ -20490,8 +29424,8 @@ def test_delete_replication_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_replication_rest_bad_request( - transport: str = "rest", request_type=replication.DeleteReplicationRequest +def test_create_backup_policy_rest_bad_request( + transport: str = "rest", 
request_type=gcn_backup_policy.CreateBackupPolicyRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20499,9 +29433,7 @@ def test_delete_replication_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20513,10 +29445,10 @@ def test_delete_replication_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_replication(request) + client.create_backup_policy(request) -def test_delete_replication_rest_flattened(): +def test_create_backup_policy_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20528,13 +29460,13 @@ def test_delete_replication_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", ) mock_args.update(sample_request) @@ -20545,20 +29477,20 @@ def test_delete_replication_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_replication(**mock_args) + client.create_backup_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/volumes/*/replications/*}" + "%s/v1/{parent=projects/*/locations/*}/backupPolicies" % client.transport._host, args[1], ) -def test_delete_replication_rest_flattened_error(transport: str = "rest"): +def test_create_backup_policy_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20567,13 +29499,15 @@ def test_delete_replication_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_replication( - replication.DeleteReplicationRequest(), - name="name_value", + client.create_backup_policy( + gcn_backup_policy.CreateBackupPolicyRequest(), + parent="parent_value", + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + backup_policy_id="backup_policy_id_value", ) -def test_delete_replication_rest_error(): +def test_create_backup_policy_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20582,145 +29516,64 @@ def test_delete_replication_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcn_replication.UpdateReplicationRequest, + backup_policy.GetBackupPolicyRequest, dict, ], ) -def test_update_replication_rest(request_type): +def test_get_backup_policy_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "replication": { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } - } - request_init["replication"] = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4", - "state": 1, - "state_details": "state_details_value", - "role": 1, - 
"replication_schedule": 1, - "mirror_state": 1, - "healthy": True, - "create_time": {"seconds": 751, "nanos": 543}, - "destination_volume": "destination_volume_value", - "transfer_stats": { - "transfer_bytes": 1515, - "total_transfer_duration": {"seconds": 751, "nanos": 543}, - "last_transfer_bytes": 2046, - "last_transfer_duration": {}, - "lag_duration": {}, - "update_time": {}, - "last_transfer_end_time": {}, - "last_transfer_error": "last_transfer_error_value", - }, - "labels": {}, - "description": "description_value", - "destination_volume_parameters": { - "storage_pool": "storage_pool_value", - "volume_id": "volume_id_value", - "share_name": "share_name_value", - "description": "description_value", - }, - "source_volume": "source_volume_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcn_replication.UpdateReplicationRequest.meta.fields["replication"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["replication"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["replication"][field])): - del request_init["replication"][field][i][subfield] - else: - del 
request_init["replication"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_policy.BackupPolicy( + name="name_value", + daily_backup_limit=1894, + weekly_backup_limit=2020, + monthly_backup_limit=2142, + description="description_value", + enabled=True, + assigned_volume_count=2253, + state=backup_policy.BackupPolicy.State.CREATING, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_policy.BackupPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_replication(request) + response = client.get_backup_policy(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, backup_policy.BackupPolicy) + assert response.name == "name_value" + assert response.daily_backup_limit == 1894 + assert response.weekly_backup_limit == 2020 + assert response.monthly_backup_limit == 2142 + assert response.description == "description_value" + assert response.enabled is True + assert response.assigned_volume_count == 2253 + assert response.state == backup_policy.BackupPolicy.State.CREATING -def test_update_replication_rest_required_fields( - request_type=gcn_replication.UpdateReplicationRequest, +def test_get_backup_policy_rest_required_fields( + request_type=backup_policy.GetBackupPolicyRequest, ): transport_class = transports.NetAppRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20735,19 +29588,21 @@ def test_update_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_replication._get_unset_required_fields(jsonified_request) + ).get_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_replication._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).get_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20756,7 +29611,7 @@ def test_update_replication_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_policy.BackupPolicy() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20768,45 +29623,39 @@ def test_update_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_policy.BackupPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_replication(request) + response = client.get_backup_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_replication_rest_unset_required_fields(): +def test_get_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.update_replication._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "replication", - ) - ) - ) + unset_fields = transport.get_backup_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_replication_rest_interceptors(null_interceptor): +def test_get_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -20817,16 +29666,14 @@ def test_update_replication_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_update_replication" + transports.NetAppRestInterceptor, "post_get_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_update_replication" + transports.NetAppRestInterceptor, "pre_get_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcn_replication.UpdateReplicationRequest.pb( - gcn_replication.UpdateReplicationRequest() + pb_message = backup_policy.GetBackupPolicyRequest.pb( + backup_policy.GetBackupPolicyRequest() ) transcode.return_value = { "method": "post", @@ -20838,19 +29685,19 @@ def test_update_replication_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = backup_policy.BackupPolicy.to_json( + backup_policy.BackupPolicy() ) - request = gcn_replication.UpdateReplicationRequest() + request = 
backup_policy.GetBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = backup_policy.BackupPolicy() - client.update_replication( + client.get_backup_policy( request, metadata=[ ("key", "val"), @@ -20862,8 +29709,8 @@ def test_update_replication_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_replication_rest_bad_request( - transport: str = "rest", request_type=gcn_replication.UpdateReplicationRequest +def test_get_backup_policy_rest_bad_request( + transport: str = "rest", request_type=backup_policy.GetBackupPolicyRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20871,11 +29718,7 @@ def test_update_replication_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "replication": { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } - } + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20887,10 +29730,10 @@ def test_update_replication_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_replication(request) + client.get_backup_policy(request) -def test_update_replication_rest_flattened(): +def test_get_backup_policy_rest_flattened(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20899,43 +29742,42 @@ def test_update_replication_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_policy.BackupPolicy() # get arguments that satisfy an http rule for this method sample_request = { - "replication": { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" } # get truthy value for each flattened field mock_args = dict( - replication=gcn_replication.Replication(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_policy.BackupPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_replication(**mock_args) + client.get_backup_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{replication.name=projects/*/locations/*/volumes/*/replications/*}" + "%s/v1/{name=projects/*/locations/*/backupPolicies/*}" % client.transport._host, args[1], ) -def test_update_replication_rest_flattened_error(transport: str = "rest"): +def test_get_backup_policy_rest_flattened_error(transport: str = "rest"): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20944,14 +29786,13 @@ def test_update_replication_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_replication( - gcn_replication.UpdateReplicationRequest(), - replication=gcn_replication.Replication(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_backup_policy( + backup_policy.GetBackupPolicyRequest(), + name="name_value", ) -def test_update_replication_rest_error(): +def test_get_backup_policy_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20960,47 +29801,52 @@ def test_update_replication_rest_error(): @pytest.mark.parametrize( "request_type", [ - replication.StopReplicationRequest, + backup_policy.ListBackupPoliciesRequest, dict, ], ) -def test_stop_replication_rest(request_type): +def test_list_backup_policies_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_policy.ListBackupPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.stop_replication(request) + response = client.list_backup_policies(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListBackupPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_stop_replication_rest_required_fields( - request_type=replication.StopReplicationRequest, +def test_list_backup_policies_rest_required_fields( + request_type=backup_policy.ListBackupPoliciesRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21015,21 +29861,30 @@ def test_stop_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).stop_replication._get_unset_required_fields(jsonified_request) + ).list_backup_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).stop_replication._get_unset_required_fields(jsonified_request) + ).list_backup_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21038,7 +29893,7 @@ def test_stop_replication_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_policy.ListBackupPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21050,37 +29905,49 @@ def test_stop_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.stop_replication(request) + response = client.list_backup_policies(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_stop_replication_rest_unset_required_fields(): +def test_list_backup_policies_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.stop_replication._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_backup_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_stop_replication_rest_interceptors(null_interceptor): +def test_list_backup_policies_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -21091,16 +29958,14 @@ def 
test_stop_replication_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetAppRestInterceptor, "post_stop_replication" + transports.NetAppRestInterceptor, "post_list_backup_policies" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_stop_replication" + transports.NetAppRestInterceptor, "pre_list_backup_policies" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = replication.StopReplicationRequest.pb( - replication.StopReplicationRequest() + pb_message = backup_policy.ListBackupPoliciesRequest.pb( + backup_policy.ListBackupPoliciesRequest() ) transcode.return_value = { "method": "post", @@ -21112,19 +29977,19 @@ def test_stop_replication_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = backup_policy.ListBackupPoliciesResponse.to_json( + backup_policy.ListBackupPoliciesResponse() ) - request = replication.StopReplicationRequest() + request = backup_policy.ListBackupPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = backup_policy.ListBackupPoliciesResponse() - client.stop_replication( + client.list_backup_policies( request, metadata=[ ("key", "val"), @@ -21136,8 +30001,8 @@ def test_stop_replication_rest_interceptors(null_interceptor): post.assert_called_once() -def test_stop_replication_rest_bad_request( - transport: str = "rest", request_type=replication.StopReplicationRequest +def test_list_backup_policies_rest_bad_request( + transport: str = "rest", request_type=backup_policy.ListBackupPoliciesRequest ): client = 
NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21145,9 +30010,7 @@ def test_stop_replication_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21159,23 +30022,137 @@ def test_stop_replication_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.stop_replication(request) + client.list_backup_policies(request) -def test_stop_replication_rest_error(): +def test_list_backup_policies_rest_flattened(): client = NetAppClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backup_policy.ListBackupPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_policy.ListBackupPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_backup_policies_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_policies( + backup_policy.ListBackupPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_backup_policies_rest_pager(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + next_page_token="abc", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[], + next_page_token="def", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + ], + next_page_token="ghi", + ), + backup_policy.ListBackupPoliciesResponse( + backup_policies=[ + backup_policy.BackupPolicy(), + backup_policy.BackupPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backup_policy.ListBackupPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup_policy.BackupPolicy) for i in results) + + pages = list(client.list_backup_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - replication.ResumeReplicationRequest, + gcn_backup_policy.UpdateBackupPolicyRequest, dict, ], ) -def test_resume_replication_rest(request_type): +def test_update_backup_policy_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21183,8 +30160,91 @@ def test_resume_replication_rest(request_type): # send a 
request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + "backup_policy": { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + } + } + request_init["backup_policy"] = { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3", + "daily_backup_limit": 1894, + "weekly_backup_limit": 2020, + "monthly_backup_limit": 2142, + "description": "description_value", + "enabled": True, + "assigned_volume_count": 2253, + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "state": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backup_policy.UpdateBackupPolicyRequest.meta.fields[ + "backup_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_policy"][field])): + del request_init["backup_policy"][field][i][subfield] + else: + del 
request_init["backup_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21199,19 +30259,18 @@ def test_resume_replication_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.resume_replication(request) + response = client.update_backup_policy(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_resume_replication_rest_required_fields( - request_type=replication.ResumeReplicationRequest, +def test_update_backup_policy_rest_required_fields( + request_type=gcn_backup_policy.UpdateBackupPolicyRequest, ): transport_class = transports.NetAppRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21226,21 +30285,19 @@ def test_resume_replication_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).resume_replication._get_unset_required_fields(jsonified_request) + ).update_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).resume_replication._get_unset_required_fields(jsonified_request) + ).update_backup_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21261,7 +30318,7 @@ def test_resume_replication_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21274,24 +30331,32 @@ def test_resume_replication_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.resume_replication(request) + response = client.update_backup_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_resume_replication_rest_unset_required_fields(): +def test_update_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.resume_replication._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_backup_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "backupPolicy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_resume_replication_rest_interceptors(null_interceptor): +def test_update_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -21304,14 +30369,14 @@ def test_resume_replication_rest_interceptors(null_interceptor): ) 
as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_resume_replication" + transports.NetAppRestInterceptor, "post_update_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_resume_replication" + transports.NetAppRestInterceptor, "pre_update_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = replication.ResumeReplicationRequest.pb( - replication.ResumeReplicationRequest() + pb_message = gcn_backup_policy.UpdateBackupPolicyRequest.pb( + gcn_backup_policy.UpdateBackupPolicyRequest() ) transcode.return_value = { "method": "post", @@ -21327,7 +30392,7 @@ def test_resume_replication_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = replication.ResumeReplicationRequest() + request = gcn_backup_policy.UpdateBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21335,7 +30400,7 @@ def test_resume_replication_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.resume_replication( + client.update_backup_policy( request, metadata=[ ("key", "val"), @@ -21347,8 +30412,8 @@ def test_resume_replication_rest_interceptors(null_interceptor): post.assert_called_once() -def test_resume_replication_rest_bad_request( - transport: str = "rest", request_type=replication.ResumeReplicationRequest +def test_update_backup_policy_rest_bad_request( + transport: str = "rest", request_type=gcn_backup_policy.UpdateBackupPolicyRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21357,7 +30422,9 @@ def test_resume_replication_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" + "backup_policy": { + "name": 
"projects/sample1/locations/sample2/backupPolicies/sample3" + } } request = request_type(**request_init) @@ -21370,10 +30437,71 @@ def test_resume_replication_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.resume_replication(request) + client.update_backup_policy(request) -def test_resume_replication_rest_error(): +def test_update_backup_policy_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_policy": { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_policy.name=projects/*/locations/*/backupPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_policy_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_policy( + gcn_backup_policy.UpdateBackupPolicyRequest(), + backup_policy=gcn_backup_policy.BackupPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_policy_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21382,20 +30510,18 @@ def test_resume_replication_rest_error(): @pytest.mark.parametrize( "request_type", [ - replication.ReverseReplicationDirectionRequest, + backup_policy.DeleteBackupPolicyRequest, dict, ], ) -def test_reverse_replication_direction_rest(request_type): +def test_delete_backup_policy_rest(request_type): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -21410,14 +30536,14 @@ def test_reverse_replication_direction_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reverse_replication_direction(request) + response = client.delete_backup_policy(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_reverse_replication_direction_rest_required_fields( - request_type=replication.ReverseReplicationDirectionRequest, +def test_delete_backup_policy_rest_required_fields( + request_type=backup_policy.DeleteBackupPolicyRequest, ): transport_class = transports.NetAppRestTransport @@ -21437,7 +30563,7 @@ def test_reverse_replication_direction_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reverse_replication_direction._get_unset_required_fields(jsonified_request) + ).delete_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21446,7 +30572,7 @@ def test_reverse_replication_direction_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reverse_replication_direction._get_unset_required_fields(jsonified_request) + ).delete_backup_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -21472,10 +30598,9 @@ def test_reverse_replication_direction_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -21485,26 +30610,24 @@ def test_reverse_replication_direction_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reverse_replication_direction(request) + response = client.delete_backup_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_reverse_replication_direction_rest_unset_required_fields(): +def test_delete_backup_policy_rest_unset_required_fields(): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.reverse_replication_direction._get_unset_required_fields( - {} - ) + unset_fields = transport.delete_backup_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reverse_replication_direction_rest_interceptors(null_interceptor): +def test_delete_backup_policy_rest_interceptors(null_interceptor): transport = transports.NetAppRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.NetAppRestInterceptor(), @@ -21517,14 +30640,14 @@ def test_reverse_replication_direction_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.NetAppRestInterceptor, "post_reverse_replication_direction" + transports.NetAppRestInterceptor, "post_delete_backup_policy" ) as post, mock.patch.object( - transports.NetAppRestInterceptor, "pre_reverse_replication_direction" + transports.NetAppRestInterceptor, "pre_delete_backup_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = replication.ReverseReplicationDirectionRequest.pb( - replication.ReverseReplicationDirectionRequest() + pb_message = backup_policy.DeleteBackupPolicyRequest.pb( + backup_policy.DeleteBackupPolicyRequest() ) transcode.return_value = { "method": "post", @@ -21540,7 
+30663,7 @@ def test_reverse_replication_direction_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = replication.ReverseReplicationDirectionRequest() + request = backup_policy.DeleteBackupPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21548,7 +30671,7 @@ def test_reverse_replication_direction_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.reverse_replication_direction( + client.delete_backup_policy( request, metadata=[ ("key", "val"), @@ -21560,8 +30683,8 @@ def test_reverse_replication_direction_rest_interceptors(null_interceptor): post.assert_called_once() -def test_reverse_replication_direction_rest_bad_request( - transport: str = "rest", request_type=replication.ReverseReplicationDirectionRequest +def test_delete_backup_policy_rest_bad_request( + transport: str = "rest", request_type=backup_policy.DeleteBackupPolicyRequest ): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21569,9 +30692,7 @@ def test_reverse_replication_direction_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/replications/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/backupPolicies/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21583,10 +30704,67 @@ def test_reverse_replication_direction_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.reverse_replication_direction(request) + client.delete_backup_policy(request) -def test_reverse_replication_direction_rest_error(): +def test_delete_backup_policy_rest_flattened(): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_policy_rest_flattened_error(transport: str = "rest"): + client = NetAppClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_policy( + backup_policy.DeleteBackupPolicyRequest(), + name="name_value", + ) + + +def test_delete_backup_policy_rest_error(): client = NetAppClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21767,6 +30945,21 @@ def test_net_app_base_transport(): "stop_replication", "resume_replication", "reverse_replication_direction", + "create_backup_vault", + "get_backup_vault", + "list_backup_vaults", + "update_backup_vault", + "delete_backup_vault", + "create_backup", + "get_backup", + "list_backups", + "delete_backup", + "update_backup", + "create_backup_policy", + "get_backup_policy", + "list_backup_policies", + "update_backup_policy", + "delete_backup_policy", "get_location", "list_locations", "get_operation", @@ -22158,6 +31351,51 @@ def test_net_app_client_transport_session_collision(transport_name): session1 = client1.transport.reverse_replication_direction._session session2 = client2.transport.reverse_replication_direction._session assert session1 != session2 + session1 = client1.transport.create_backup_vault._session + session2 = client2.transport.create_backup_vault._session + assert session1 != session2 + session1 = client1.transport.get_backup_vault._session + session2 = client2.transport.get_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_backup_vaults._session + session2 = client2.transport.list_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.update_backup_vault._session + session2 = client2.transport.update_backup_vault._session + assert session1 != session2 + session1 = client1.transport.delete_backup_vault._session + session2 = client2.transport.delete_backup_vault._session + assert session1 != session2 + session1 = client1.transport.create_backup._session + session2 = client2.transport.create_backup._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + 
session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.update_backup._session + session2 = client2.transport.update_backup._session + assert session1 != session2 + session1 = client1.transport.create_backup_policy._session + session2 = client2.transport.create_backup_policy._session + assert session1 != session2 + session1 = client1.transport.get_backup_policy._session + session2 = client2.transport.get_backup_policy._session + assert session1 != session2 + session1 = client1.transport.list_backup_policies._session + session2 = client2.transport.list_backup_policies._session + assert session1 != session2 + session1 = client1.transport.update_backup_policy._session + session2 = client2.transport.update_backup_policy._session + assert session1 != session2 + session1 = client1.transport.delete_backup_policy._session + session2 = client2.transport.delete_backup_policy._session + assert session1 != session2 def test_net_app_grpc_transport_channel(): @@ -22338,10 +31576,95 @@ def test_parse_active_directory_path(): assert expected == actual -def test_kms_config_path(): +def test_backup_path(): project = "cuttlefish" location = "mussel" - kms_config = "winkle" + backup_vault = "winkle" + backup = "nautilus" + expected = "projects/{project}/locations/{location}/backupVaults/{backup_vault}/backups/{backup}".format( + project=project, + location=location, + backup_vault=backup_vault, + backup=backup, + ) + actual = NetAppClient.backup_path(project, location, backup_vault, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "scallop", + "location": "abalone", + "backup_vault": "squid", + "backup": "clam", + 
} + path = NetAppClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = NetAppClient.parse_backup_path(path) + assert expected == actual + + +def test_backup_policy_path(): + project = "whelk" + location = "octopus" + backup_policy = "oyster" + expected = ( + "projects/{project}/locations/{location}/backupPolicies/{backup_policy}".format( + project=project, + location=location, + backup_policy=backup_policy, + ) + ) + actual = NetAppClient.backup_policy_path(project, location, backup_policy) + assert expected == actual + + +def test_parse_backup_policy_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "backup_policy": "mussel", + } + path = NetAppClient.backup_policy_path(**expected) + + # Check that the path construction is reversible. + actual = NetAppClient.parse_backup_policy_path(path) + assert expected == actual + + +def test_backup_vault_path(): + project = "winkle" + location = "nautilus" + backup_vault = "scallop" + expected = ( + "projects/{project}/locations/{location}/backupVaults/{backup_vault}".format( + project=project, + location=location, + backup_vault=backup_vault, + ) + ) + actual = NetAppClient.backup_vault_path(project, location, backup_vault) + assert expected == actual + + +def test_parse_backup_vault_path(): + expected = { + "project": "abalone", + "location": "squid", + "backup_vault": "clam", + } + path = NetAppClient.backup_vault_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetAppClient.parse_backup_vault_path(path) + assert expected == actual + + +def test_kms_config_path(): + project = "whelk" + location = "octopus" + kms_config = "oyster" expected = "projects/{project}/locations/{location}/kmsConfigs/{kms_config}".format( project=project, location=location, @@ -22353,9 +31676,9 @@ def test_kms_config_path(): def test_parse_kms_config_path(): expected = { - "project": "nautilus", - "location": "scallop", - "kms_config": "abalone", + "project": "nudibranch", + "location": "cuttlefish", + "kms_config": "mussel", } path = NetAppClient.kms_config_path(**expected) @@ -22365,8 +31688,8 @@ def test_parse_kms_config_path(): def test_network_path(): - project = "squid" - network = "clam" + project = "winkle" + network = "nautilus" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -22377,8 +31700,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "whelk", - "network": "octopus", + "project": "scallop", + "network": "abalone", } path = NetAppClient.network_path(**expected) @@ -22388,10 +31711,10 @@ def test_parse_network_path(): def test_replication_path(): - project = "oyster" - location = "nudibranch" - volume = "cuttlefish" - replication = "mussel" + project = "squid" + location = "clam" + volume = "whelk" + replication = "octopus" expected = "projects/{project}/locations/{location}/volumes/{volume}/replications/{replication}".format( project=project, location=location, @@ -22404,10 +31727,10 @@ def test_replication_path(): def test_parse_replication_path(): expected = { - "project": "winkle", - "location": "nautilus", - "volume": "scallop", - "replication": "abalone", + "project": "oyster", + "location": "nudibranch", + "volume": "cuttlefish", + "replication": "mussel", } path = NetAppClient.replication_path(**expected) @@ -22417,10 +31740,10 @@ def test_parse_replication_path(): def test_snapshot_path(): - project = "squid" - location = 
"clam" - volume = "whelk" - snapshot = "octopus" + project = "winkle" + location = "nautilus" + volume = "scallop" + snapshot = "abalone" expected = "projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot}".format( project=project, location=location, @@ -22433,10 +31756,10 @@ def test_snapshot_path(): def test_parse_snapshot_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "volume": "cuttlefish", - "snapshot": "mussel", + "project": "squid", + "location": "clam", + "volume": "whelk", + "snapshot": "octopus", } path = NetAppClient.snapshot_path(**expected) @@ -22446,9 +31769,9 @@ def test_parse_snapshot_path(): def test_storage_pool_path(): - project = "winkle" - location = "nautilus" - storage_pool = "scallop" + project = "oyster" + location = "nudibranch" + storage_pool = "cuttlefish" expected = ( "projects/{project}/locations/{location}/storagePools/{storage_pool}".format( project=project, @@ -22462,9 +31785,9 @@ def test_storage_pool_path(): def test_parse_storage_pool_path(): expected = { - "project": "abalone", - "location": "squid", - "storage_pool": "clam", + "project": "mussel", + "location": "winkle", + "storage_pool": "nautilus", } path = NetAppClient.storage_pool_path(**expected) @@ -22474,9 +31797,9 @@ def test_parse_storage_pool_path(): def test_volume_path(): - project = "whelk" - location = "octopus" - volume = "oyster" + project = "scallop" + location = "abalone" + volume = "squid" expected = "projects/{project}/locations/{location}/volumes/{volume}".format( project=project, location=location, @@ -22488,9 +31811,9 @@ def test_volume_path(): def test_parse_volume_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "volume": "mussel", + "project": "clam", + "location": "whelk", + "volume": "octopus", } path = NetAppClient.volume_path(**expected) @@ -22500,7 +31823,7 @@ def test_parse_volume_path(): def test_common_billing_account_path(): - billing_account = "winkle" + 
billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -22510,7 +31833,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "nudibranch", } path = NetAppClient.common_billing_account_path(**expected) @@ -22520,7 +31843,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -22530,7 +31853,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "mussel", } path = NetAppClient.common_folder_path(**expected) @@ -22540,7 +31863,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -22550,7 +31873,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nautilus", } path = NetAppClient.common_organization_path(**expected) @@ -22560,7 +31883,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -22570,7 +31893,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "abalone", } path = NetAppClient.common_project_path(**expected) @@ -22580,8 +31903,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -22592,8 +31915,8 @@ def test_common_location_path(): def 
test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "whelk", + "location": "octopus", } path = NetAppClient.common_location_path(**expected) diff --git a/packages/google-cloud-os-login/CHANGELOG.md b/packages/google-cloud-os-login/CHANGELOG.md index 80708f562bb6..0bdca5ae71d2 100644 --- a/packages/google-cloud-os-login/CHANGELOG.md +++ b/packages/google-cloud-os-login/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-oslogin/#history +## [2.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-os-login-v2.12.0...google-cloud-os-login-v2.13.0) (2024-01-12) + + +### Features + +* [google-cloud-os-login] add regions field to ([d30f83d](https://github.com/googleapis/google-cloud-python/commit/d30f83d887666b7cc2c26a2fdb65f5420ec56b64)) +* [google-cloud-os-login] added field `ImportSshPublicKeyRequest.regions` ([#12168](https://github.com/googleapis/google-cloud-python/issues/12168)) ([d30f83d](https://github.com/googleapis/google-cloud-python/commit/d30f83d887666b7cc2c26a2fdb65f5420ec56b64)) + ## [2.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-os-login-v2.11.0...google-cloud-os-login-v2.12.0) (2023-12-07) diff --git a/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py b/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py index f8c008523547..13e6df46e52c 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.12.0" # {x-release-please-version} +__version__ = "2.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py index f8c008523547..13e6df46e52c 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.12.0" # {x-release-please-version} +__version__ = "2.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py index c8eb5f656abe..e730e226592a 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py @@ -187,6 +187,11 @@ class ImportSshPublicKeyRequest(proto.Message): project_id (str): The project ID of the Google Cloud Platform project. + regions (MutableSequence[str]): + Optional. The regions to which to assert that + the key was written. If unspecified, defaults to + all regions. Regions are listed at + https://cloud.google.com/about/locations#region. 
""" parent: str = proto.Field( @@ -202,6 +207,10 @@ class ImportSshPublicKeyRequest(proto.Message): proto.STRING, number=3, ) + regions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) class ImportSshPublicKeyResponse(proto.Message): diff --git a/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json b/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json index 57993a57824c..21f93396886f 100644 --- a/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json +++ b/packages/google-cloud-os-login/samples/generated_samples/snippet_metadata_google.cloud.oslogin.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-os-login", - "version": "2.12.0" + "version": "2.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-os-login/scripts/fixup_oslogin_v1_keywords.py b/packages/google-cloud-os-login/scripts/fixup_oslogin_v1_keywords.py index a7d415b2f5d4..e295b981f772 100644 --- a/packages/google-cloud-os-login/scripts/fixup_oslogin_v1_keywords.py +++ b/packages/google-cloud-os-login/scripts/fixup_oslogin_v1_keywords.py @@ -44,7 +44,7 @@ class osloginCallTransformer(cst.CSTTransformer): 'delete_ssh_public_key': ('name', ), 'get_login_profile': ('name', 'project_id', 'system_id', ), 'get_ssh_public_key': ('name', ), - 'import_ssh_public_key': ('parent', 'ssh_public_key', 'project_id', ), + 'import_ssh_public_key': ('parent', 'ssh_public_key', 'project_id', 'regions', ), 'update_ssh_public_key': ('name', 'ssh_public_key', 'update_mask', ), } diff --git a/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py b/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py index 55fdb02f4f5a..027ad065dd04 100644 --- a/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py +++ 
b/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py @@ -4039,7 +4039,12 @@ def test_import_ssh_public_key_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).import_ssh_public_key._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("project_id",)) + assert not set(unset_fields) - set( + ( + "project_id", + "regions", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4094,7 +4099,15 @@ def test_import_ssh_public_key_rest_unset_required_fields(): ) unset_fields = transport.import_ssh_public_key._get_unset_required_fields({}) - assert set(unset_fields) == (set(("projectId",)) & set(("parent",))) + assert set(unset_fields) == ( + set( + ( + "projectId", + "regions", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md index a93a467b41f5..349383e14ea2 100644 --- a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md +++ b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.16.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.16.0...google-cloud-recaptcha-enterprise-v1.16.1) (2024-01-19) + + +### Documentation + +* [google-cloud-recaptcha-enterprise] update comment for `AccountVerificationInfo.username` ([#12201](https://github.com/googleapis/google-cloud-python/issues/12201)) ([f7cd400](https://github.com/googleapis/google-cloud-python/commit/f7cd400be8b61d01904a950fbf7fa474325e4300)) + ## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.15.0...google-cloud-recaptcha-enterprise-v1.16.0) (2023-12-07) diff --git 
a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 725b83961de2..ca8e96ca66da 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 725b83961de2..ca8e96ca66da 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py index 02a8b8712d39..95d0f675f689 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py @@ -514,9 +514,9 @@ class AccountVerificationInfo(proto.Message): Output only. Result of the latest account verification challenge. 
username (str): - Username of the account that is being - verified. Deprecated. Customers should now - provide the hashed account ID field in Event. + Username of the account that is being verified. Deprecated. + Customers should now provide the ``account_id`` field in + ``event.user_info``. """ class Result(proto.Enum): diff --git a/packages/google-cloud-recaptcha-enterprise/noxfile.py b/packages/google-cloud-recaptcha-enterprise/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-recaptcha-enterprise/noxfile.py +++ b/packages/google-cloud-recaptcha-enterprise/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index bc936efa68b7..193d13331978 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "1.16.0" + "version": "1.16.1" }, "snippets": [ { diff --git a/packages/google-cloud-recommender/CHANGELOG.md b/packages/google-cloud-recommender/CHANGELOG.md index 63444294911c..9b15cd4dd01f 100644 --- a/packages/google-cloud-recommender/CHANGELOG.md +++ b/packages/google-cloud-recommender/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-recommender/#history +## [2.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recommender-v2.13.0...google-cloud-recommender-v2.14.0) (2023-12-12) + + +### Features + +* Support cost_in_local_currency field in the cost projection ([8832a03](https://github.com/googleapis/google-cloud-python/commit/8832a03cb0de53f3e30ca53899091a0a3433a409)) + + +### Documentation + +* Add comment for targetResources ([8832a03](https://github.com/googleapis/google-cloud-python/commit/8832a03cb0de53f3e30ca53899091a0a3433a409)) +* Fix typo for the comment of reliability_projection ([8832a03](https://github.com/googleapis/google-cloud-python/commit/8832a03cb0de53f3e30ca53899091a0a3433a409)) + ## 
[2.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recommender-v2.12.1...google-cloud-recommender-v2.13.0) (2023-12-07) diff --git a/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py b/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py index 13e6df46e52c..d5b133eaf7fe 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py +++ b/packages/google-cloud-recommender/google/cloud/recommender/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py index 13e6df46e52c..d5b133eaf7fe 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py index 78ab904a6ce0..ecfe1fffab73 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/async_client.py @@ -708,6 +708,8 @@ async def sample_list_recommendations(): - ``priority`` + - ``targetResources`` + Examples: - ``stateInfo.state = ACTIVE OR stateInfo.state = DISMISSED`` @@ -716,8 +718,12 @@ async def sample_list_recommendations(): - ``priority = P1 OR priority = P2`` + - ``targetResources : //compute.googleapis.com/projects/1234/zones/us-central1-a/instances/instance-1`` + - ``stateInfo.state = ACTIVE AND (priority = P1 OR priority = P2)`` + The max allowed filter length is 500 characters. 
+ (These expressions are based on the filter language described at https://google.aip.dev/160) diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py index a6311259c03a..c446fff128fd 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/services/recommender/client.py @@ -1018,6 +1018,8 @@ def sample_list_recommendations(): - ``priority`` + - ``targetResources`` + Examples: - ``stateInfo.state = ACTIVE OR stateInfo.state = DISMISSED`` @@ -1026,8 +1028,12 @@ def sample_list_recommendations(): - ``priority = P1 OR priority = P2`` + - ``targetResources : //compute.googleapis.com/projects/1234/zones/us-central1-a/instances/instance-1`` + - ``stateInfo.state = ACTIVE AND (priority = P1 OR priority = P2)`` + The max allowed filter length is 500 characters. + (These expressions are based on the filter language described at https://google.aip.dev/160) diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommendation.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommendation.py index c35448f7b67e..0349d5bf8037 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommendation.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommendation.py @@ -439,6 +439,9 @@ class CostProjection(proto.Message): prices. duration (google.protobuf.duration_pb2.Duration): Duration for which this cost applies. + cost_in_local_currency (google.type.money_pb2.Money): + The approximate cost savings in the billing + account's local currency. 
""" cost: money_pb2.Money = proto.Field( @@ -451,6 +454,11 @@ class CostProjection(proto.Message): number=2, message=duration_pb2.Duration, ) + cost_in_local_currency: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=3, + message=money_pb2.Money, + ) class SecurityProjection(proto.Message): @@ -564,7 +572,7 @@ class Impact(proto.Message): This field is a member of `oneof`_ ``projection``. reliability_projection (google.cloud.recommender_v1.types.ReliabilityProjection): - Use with CategoryType.RELAIBILITY + Use with CategoryType.RELIABILITY This field is a member of `oneof`_ ``projection``. """ diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommender_service.py b/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommender_service.py index 1b0c820877bc..160aa62483fe 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommender_service.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1/types/recommender_service.py @@ -94,6 +94,8 @@ class ListInsightsRequest(proto.Message): - ``severity`` + - ``targetResources`` + Examples: - ``stateInfo.state = ACTIVE OR stateInfo.state = DISMISSED`` @@ -102,8 +104,12 @@ class ListInsightsRequest(proto.Message): - ``severity = CRITICAL OR severity = HIGH`` + - ``targetResources : //compute.googleapis.com/projects/1234/zones/us-central1-a/instances/instance-1`` + - ``stateInfo.state = ACTIVE AND (severity = CRITICAL OR severity = HIGH)`` + The max allowed filter length is 500 characters. 
+ (These expressions are based on the filter language described at https://google.aip.dev/160) """ @@ -239,6 +245,8 @@ class ListRecommendationsRequest(proto.Message): - ``priority`` + - ``targetResources`` + Examples: - ``stateInfo.state = ACTIVE OR stateInfo.state = DISMISSED`` @@ -247,8 +255,12 @@ class ListRecommendationsRequest(proto.Message): - ``priority = P1 OR priority = P2`` + - ``targetResources : //compute.googleapis.com/projects/1234/zones/us-central1-a/instances/instance-1`` + - ``stateInfo.state = ACTIVE AND (priority = P1 OR priority = P2)`` + The max allowed filter length is 500 characters. + (These expressions are based on the filter language described at https://google.aip.dev/160) """ @@ -319,7 +331,7 @@ class MarkRecommendationDismissedRequest(proto.Message): Attributes: name (str): - Name of the recommendation. + Required. Name of the recommendation. etag (str): Fingerprint of the Recommendation. Provides optimistic locking. diff --git a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py index 13e6df46e52c..d5b133eaf7fe 100644 --- a/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py +++ b/packages/google-cloud-recommender/google/cloud/recommender_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.0" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json index e027227c237f..1a37815fbfa4 100644 --- a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json +++ b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recommender", - "version": "2.13.0" + "version": "2.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json index fac0335f8577..f857f71968ec 100644 --- a/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json +++ b/packages/google-cloud-recommender/samples/generated_samples/snippet_metadata_google.cloud.recommender.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-recommender", - "version": "2.13.0" + "version": "2.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-retail/CHANGELOG.md b/packages/google-cloud-retail/CHANGELOG.md index a8f10945c300..0578ce2a4b89 100644 --- a/packages/google-cloud-retail/CHANGELOG.md +++ b/packages/google-cloud-retail/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-retail-v1.17.0...google-cloud-retail-v1.18.0) (2024-01-22) + + +### Features + +* **v2alpha:** Add analytics service ([266cb0c](https://github.com/googleapis/google-cloud-python/commit/266cb0cbf245e28ac61ae940f83d732b768fc38f)) 
+* **v2beta:** Add analytics service ([266cb0c](https://github.com/googleapis/google-cloud-python/commit/266cb0cbf245e28ac61ae940f83d732b768fc38f)) + ## [1.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-retail-v1.16.3...google-cloud-retail-v1.17.0) (2023-12-07) diff --git a/packages/google-cloud-retail/docs/retail_v2alpha/analytics_service.rst b/packages/google-cloud-retail/docs/retail_v2alpha/analytics_service.rst new file mode 100644 index 000000000000..a116a37ea5da --- /dev/null +++ b/packages/google-cloud-retail/docs/retail_v2alpha/analytics_service.rst @@ -0,0 +1,6 @@ +AnalyticsService +---------------------------------- + +.. automodule:: google.cloud.retail_v2alpha.services.analytics_service + :members: + :inherited-members: diff --git a/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst b/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst index 464a57dc017f..1cb0a4f0b623 100644 --- a/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst +++ b/packages/google-cloud-retail/docs/retail_v2alpha/services_.rst @@ -3,6 +3,7 @@ Services for Google Cloud Retail v2alpha API .. toctree:: :maxdepth: 2 + analytics_service catalog_service completion_service control_service diff --git a/packages/google-cloud-retail/docs/retail_v2beta/analytics_service.rst b/packages/google-cloud-retail/docs/retail_v2beta/analytics_service.rst new file mode 100644 index 000000000000..c6e491c84bfe --- /dev/null +++ b/packages/google-cloud-retail/docs/retail_v2beta/analytics_service.rst @@ -0,0 +1,6 @@ +AnalyticsService +---------------------------------- + +.. 
automodule:: google.cloud.retail_v2beta.services.analytics_service + :members: + :inherited-members: diff --git a/packages/google-cloud-retail/docs/retail_v2beta/services_.rst b/packages/google-cloud-retail/docs/retail_v2beta/services_.rst index 970cae465af6..ddcff7dff36c 100644 --- a/packages/google-cloud-retail/docs/retail_v2beta/services_.rst +++ b/packages/google-cloud-retail/docs/retail_v2beta/services_.rst @@ -3,6 +3,7 @@ Services for Google Cloud Retail v2beta API .. toctree:: :maxdepth: 2 + analytics_service catalog_service completion_service control_service diff --git a/packages/google-cloud-retail/google/cloud/retail/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail/gapic_version.py index 3293978284a9..1d1b45f2c68a 100644 --- a/packages/google-cloud-retail/google/cloud/retail/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py index 3293978284a9..1d1b45f2c68a 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py index f8be59c9d6ee..c0bff0cf61ae 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/__init__.py @@ -18,6 +18,10 @@ __version__ = package_version.__version__ +from .services.analytics_service import ( + AnalyticsServiceAsyncClient, + AnalyticsServiceClient, +) from .services.catalog_service import CatalogServiceAsyncClient, CatalogServiceClient from .services.completion_service import ( CompletionServiceAsyncClient, @@ -100,11 +104,14 @@ ) from .types.export_config import ( BigQueryOutputResult, + ExportAnalyticsMetricsRequest, + ExportAnalyticsMetricsResponse, ExportErrorsConfig, ExportMetadata, ExportProductsResponse, ExportUserEventsResponse, GcsOutputResult, + OutputConfig, OutputResult, ) from .types.import_config import ( @@ -212,6 +219,7 @@ ) __all__ = ( + "AnalyticsServiceAsyncClient", "CatalogServiceAsyncClient", "CompletionServiceAsyncClient", "ControlServiceAsyncClient", @@ -230,6 +238,7 @@ "AddLocalInventoriesMetadata", "AddLocalInventoriesRequest", "AddLocalInventoriesResponse", + "AnalyticsServiceClient", "AttributeConfigLevel", "AttributesConfig", "Audience", @@ -265,6 +274,8 @@ "DeleteProductRequest", "DeleteServingConfigRequest", "ExperimentInfo", + "ExportAnalyticsMetricsRequest", + "ExportAnalyticsMetricsResponse", "ExportErrorsConfig", "ExportMetadata", "ExportProductsResponse", @@ -310,6 +321,7 @@ "MerchantCenterLinkingConfig", "Model", "ModelServiceClient", + "OutputConfig", "OutputResult", "PauseModelRequest", "PredictRequest", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json 
index 05416941fc23..d4859b9820be 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_metadata.json @@ -5,6 +5,40 @@ "protoPackage": "google.cloud.retail.v2alpha", "schema": "1.0", "services": { + "AnalyticsService": { + "clients": { + "grpc": { + "libraryClient": "AnalyticsServiceClient", + "rpcs": { + "ExportAnalyticsMetrics": { + "methods": [ + "export_analytics_metrics" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AnalyticsServiceAsyncClient", + "rpcs": { + "ExportAnalyticsMetrics": { + "methods": [ + "export_analytics_metrics" + ] + } + } + }, + "rest": { + "libraryClient": "AnalyticsServiceClient", + "rpcs": { + "ExportAnalyticsMetrics": { + "methods": [ + "export_analytics_metrics" + ] + } + } + } + } + }, "CatalogService": { "clients": { "grpc": { diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py index 3293978284a9..1d1b45f2c68a 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/__init__.py new file mode 100644 index 000000000000..a4564e6dcc53 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AnalyticsServiceAsyncClient +from .client import AnalyticsServiceClient + +__all__ = ( + "AnalyticsServiceClient", + "AnalyticsServiceAsyncClient", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py new file mode 100644 index 000000000000..124542272372 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/async_client.py @@ -0,0 +1,459 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import export_config + +from .client import AnalyticsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport +from .transports.grpc_asyncio import AnalyticsServiceGrpcAsyncIOTransport + + +class AnalyticsServiceAsyncClient: + """Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. 
+ """ + + _client: AnalyticsServiceClient + + DEFAULT_ENDPOINT = AnalyticsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AnalyticsServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + AnalyticsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AnalyticsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AnalyticsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AnalyticsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AnalyticsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AnalyticsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(AnalyticsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + AnalyticsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(AnalyticsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + AnalyticsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceAsyncClient: The constructed client. + """ + return AnalyticsServiceClient.from_service_account_info.__func__(AnalyticsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceAsyncClient: The constructed client. + """ + return AnalyticsServiceClient.from_service_account_file.__func__(AnalyticsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return AnalyticsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AnalyticsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AnalyticsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AnalyticsServiceClient).get_transport_class, type(AnalyticsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AnalyticsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the analytics service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AnalyticsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AnalyticsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def export_analytics_metrics( + self, + request: Optional[ + Union[export_config.ExportAnalyticsMetricsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + async def sample_export_analytics_metrics(): + # Create a client + client = retail_v2alpha.AnalyticsServiceAsyncClient() + + # Initialize request argument(s) + output_config = retail_v2alpha.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2alpha.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2alpha.types.ExportAnalyticsMetricsRequest, dict]]): + The request object. Request message for the ``ExportAnalyticsMetrics`` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2alpha.types.ExportAnalyticsMetricsResponse` Response of the ExportAnalyticsMetricsRequest. If the long running + operation was successful, then this message is + returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + request = export_config.ExportAnalyticsMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_analytics_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("catalog", request.catalog),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + export_config.ExportAnalyticsMetricsResponse, + metadata_type=export_config.ExportMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AnalyticsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AnalyticsServiceAsyncClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py new file mode 100644 index 000000000000..1d1fce74d67e --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/client.py @@ -0,0 +1,666 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import export_config + +from .transports.base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport +from .transports.grpc import AnalyticsServiceGrpcTransport +from .transports.grpc_asyncio import AnalyticsServiceGrpcAsyncIOTransport +from .transports.rest import AnalyticsServiceRestTransport + + +class AnalyticsServiceClientMeta(type): + """Metaclass for the AnalyticsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AnalyticsServiceTransport]] + _transport_registry["grpc"] = AnalyticsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AnalyticsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AnalyticsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AnalyticsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AnalyticsServiceClient(metaclass=AnalyticsServiceClientMeta): + """Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "retail.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AnalyticsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AnalyticsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AnalyticsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the analytics service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, AnalyticsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AnalyticsServiceTransport): + # transport is a AnalyticsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def export_analytics_metrics( + self, + request: Optional[ + Union[export_config.ExportAnalyticsMetricsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2alpha + + def sample_export_analytics_metrics(): + # Create a client + client = retail_v2alpha.AnalyticsServiceClient() + + # Initialize request argument(s) + output_config = retail_v2alpha.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2alpha.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2alpha.types.ExportAnalyticsMetricsRequest, dict]): + The request object. Request message for the ``ExportAnalyticsMetrics`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2alpha.types.ExportAnalyticsMetricsResponse` Response of the ExportAnalyticsMetricsRequest. If the long running + operation was successful, then this message is + returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a export_config.ExportAnalyticsMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, export_config.ExportAnalyticsMetricsRequest): + request = export_config.ExportAnalyticsMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_analytics_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("catalog", request.catalog),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + export_config.ExportAnalyticsMetricsResponse, + metadata_type=export_config.ExportMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AnalyticsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AnalyticsServiceClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/__init__.py new file mode 100644 index 000000000000..478813851631 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AnalyticsServiceTransport +from .grpc import AnalyticsServiceGrpcTransport +from .grpc_asyncio import AnalyticsServiceGrpcAsyncIOTransport +from .rest import AnalyticsServiceRestInterceptor, AnalyticsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AnalyticsServiceTransport]] +_transport_registry["grpc"] = AnalyticsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AnalyticsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AnalyticsServiceRestTransport + +__all__ = ( + "AnalyticsServiceTransport", + "AnalyticsServiceGrpcTransport", + "AnalyticsServiceGrpcAsyncIOTransport", + "AnalyticsServiceRestTransport", + "AnalyticsServiceRestInterceptor", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/base.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/base.py new file mode 100644 index 000000000000..cdbd17a273a8 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/base.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2alpha import gapic_version as package_version +from google.cloud.retail_v2alpha.types import export_config + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AnalyticsServiceTransport(abc.ABC): + """Abstract transport class for AnalyticsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "retail.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.export_analytics_metrics: gapic_v1.method.wrap_method( + self.export_analytics_metrics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AnalyticsServiceTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/grpc.py new file mode 100644 index 
000000000000..58e1b425de67 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/grpc.py @@ -0,0 +1,326 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.retail_v2alpha.types import export_config + +from .base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport + + +class AnalyticsServiceGrpcTransport(AnalyticsServiceTransport): + """gRPC backend transport for AnalyticsService. + + Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], operations_pb2.Operation + ]: + r"""Return a callable for the export analytics metrics method over gRPC. + + Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + Returns: + Callable[[~.ExportAnalyticsMetricsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_analytics_metrics" not in self._stubs: + self._stubs["export_analytics_metrics"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.AnalyticsService/ExportAnalyticsMetrics", + request_serializer=export_config.ExportAnalyticsMetricsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_analytics_metrics"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AnalyticsServiceGrpcTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..dc7585fb4eca --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/grpc_asyncio.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.retail_v2alpha.types import export_config + +from .base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport +from .grpc import AnalyticsServiceGrpcTransport + + +class AnalyticsServiceGrpcAsyncIOTransport(AnalyticsServiceTransport): + """gRPC AsyncIO backend transport for AnalyticsService. + + Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the export analytics metrics method over gRPC. + + Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + Returns: + Callable[[~.ExportAnalyticsMetricsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_analytics_metrics" not in self._stubs: + self._stubs["export_analytics_metrics"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2alpha.AnalyticsService/ExportAnalyticsMetrics", + request_serializer=export_config.ExportAnalyticsMetricsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_analytics_metrics"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("AnalyticsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/rest.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/rest.py new file mode 100644 index 000000000000..b9d2eb6ffb4a --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/services/analytics_service/transports/rest.py @@ -0,0 +1,595 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2alpha.types import export_config + +from .base import AnalyticsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AnalyticsServiceRestInterceptor: + """Interceptor for AnalyticsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AnalyticsServiceRestTransport. + + .. code-block:: python + class MyCustomAnalyticsServiceInterceptor(AnalyticsServiceRestInterceptor): + def pre_export_analytics_metrics(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_analytics_metrics(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AnalyticsServiceRestTransport(interceptor=MyCustomAnalyticsServiceInterceptor()) + client = AnalyticsServiceClient(transport=transport) + + + """ + + def pre_export_analytics_metrics( + self, + request: export_config.ExportAnalyticsMetricsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[export_config.ExportAnalyticsMetricsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_analytics_metrics + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsService server. + """ + return request, metadata + + def post_export_analytics_metrics( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_analytics_metrics + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AnalyticsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AnalyticsServiceRestInterceptor + + +class AnalyticsServiceRestTransport(AnalyticsServiceTransport): + """REST backend transport for AnalyticsService. + + Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AnalyticsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AnalyticsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/places/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v2alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _ExportAnalyticsMetrics(AnalyticsServiceRestStub): + def __hash__(self): + return hash("ExportAnalyticsMetrics") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: export_config.ExportAnalyticsMetricsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export analytics metrics method over HTTP. + + Args: + request (~.export_config.ExportAnalyticsMetricsRequest): + The request object. Request message for the ``ExportAnalyticsMetrics`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2alpha/{catalog=projects/*/locations/*/catalogs/*}:exportAnalyticsMetrics", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_analytics_metrics( + request, metadata + ) + pb_request = export_config.ExportAnalyticsMetricsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_analytics_metrics(resp) + return resp + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportAnalyticsMetrics(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AnalyticsServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/branches/*/places/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AnalyticsServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2alpha/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AnalyticsServiceRestTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py index 5badc78e5e9f..3a8f833ce740 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/__init__.py @@ -70,11 +70,14 @@ ) from .export_config import ( BigQueryOutputResult, + ExportAnalyticsMetricsRequest, + ExportAnalyticsMetricsResponse, ExportErrorsConfig, ExportMetadata, ExportProductsResponse, ExportUserEventsResponse, GcsOutputResult, + OutputConfig, OutputResult, ) from .import_config import ( @@ -226,11 +229,14 @@ "ListControlsResponse", "UpdateControlRequest", "BigQueryOutputResult", + "ExportAnalyticsMetricsRequest", + "ExportAnalyticsMetricsResponse", "ExportErrorsConfig", "ExportMetadata", "ExportProductsResponse", "ExportUserEventsResponse", "GcsOutputResult", + "OutputConfig", "OutputResult", "BigQuerySource", "CompletionDataInputConfig", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/analytics_service.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/analytics_service.py new file mode 100644 index 000000000000..7faf39a115a4 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/analytics_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.retail.v2alpha", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/export_config.py b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/export_config.py index 492ac2cca17c..57cdd145ae2e 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/export_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2alpha/types/export_config.py @@ -24,10 +24,13 @@ __protobuf__ = proto.module( package="google.cloud.retail.v2alpha", manifest={ + "OutputConfig", "ExportErrorsConfig", + "ExportAnalyticsMetricsRequest", "ExportMetadata", "ExportProductsResponse", "ExportUserEventsResponse", + "ExportAnalyticsMetricsResponse", "OutputResult", "BigQueryOutputResult", "GcsOutputResult", @@ -35,6 +38,93 @@ ) +class OutputConfig(proto.Message): + r"""The output configuration setting. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_destination (google.cloud.retail_v2alpha.types.OutputConfig.GcsDestination): + The Google Cloud Storage location where the + output is to be written to. + + This field is a member of `oneof`_ ``destination``. 
+ bigquery_destination (google.cloud.retail_v2alpha.types.OutputConfig.BigQueryDestination): + The BigQuery location where the output is to + be written to. + + This field is a member of `oneof`_ ``destination``. + """ + + class GcsDestination(proto.Message): + r"""The Google Cloud Storage output destination configuration. + + Attributes: + output_uri_prefix (str): + Required. The output uri prefix for saving output data to + json files. Some mapping examples are as follows: + output_uri_prefix sample output(assuming the object is + foo.json) ======================== + ============================================= gs://bucket/ + gs://bucket/foo.json gs://bucket/folder/ + gs://bucket/folder/foo.json gs://bucket/folder/item\_ + gs://bucket/folder/item_foo.json + """ + + output_uri_prefix: str = proto.Field( + proto.STRING, + number=1, + ) + + class BigQueryDestination(proto.Message): + r"""The BigQuery output destination configuration. + + Attributes: + dataset_id (str): + Required. The ID of a BigQuery Dataset. + table_id_prefix (str): + Required. The prefix of exported BigQuery + tables. + table_type (str): + Required. Describes the table type. The following values are + supported: + + - ``table``: A BigQuery native table. + - ``view``: A virtual table defined by a SQL query. + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + table_id_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + table_type: str = proto.Field( + proto.STRING, + number=3, + ) + + gcs_destination: GcsDestination = proto.Field( + proto.MESSAGE, + number=1, + oneof="destination", + message=GcsDestination, + ) + bigquery_destination: BigQueryDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="destination", + message=BigQueryDestination, + ) + + class ExportErrorsConfig(proto.Message): r"""Configuration of destination for Export related errors. 
@@ -57,6 +147,52 @@ class ExportErrorsConfig(proto.Message): ) +class ExportAnalyticsMetricsRequest(proto.Message): + r"""Request message for the ``ExportAnalyticsMetrics`` method. + + Attributes: + catalog (str): + Required. Full resource name of the parent catalog. Expected + format: ``projects/*/locations/*/catalogs/*`` + output_config (google.cloud.retail_v2alpha.types.OutputConfig): + Required. The output location of the data. + filter (str): + A filtering expression to specify restrictions on returned + metrics. The expression is a sequence of terms. Each term + applies a restriction to the returned metrics. Use this + expression to restrict results to a specific time range. + + Currently we expect only one types of fields: + + :: + + * `timestamp`: This can be specified twice, once with a + less than operator and once with a greater than operator. The + `timestamp` restriction should result in one, contiguous, valid, + `timestamp` range. + + Some examples of valid filters expressions: + + - Example 1: + ``timestamp > "2012-04-23T18:25:43.511Z" timestamp < "2012-04-23T18:30:43.511Z"`` + - Example 2: ``timestamp > "2012-04-23T18:25:43.511Z"`` + """ + + catalog: str = proto.Field( + proto.STRING, + number=1, + ) + output_config: "OutputConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="OutputConfig", + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + class ExportMetadata(proto.Message): r"""Metadata related to the progress of the Export operation. This is returned by the google.longrunning.Operation.metadata @@ -150,6 +286,40 @@ class ExportUserEventsResponse(proto.Message): ) +class ExportAnalyticsMetricsResponse(proto.Message): + r"""Response of the ExportAnalyticsMetricsRequest. If the long + running operation was successful, then this message is returned + by the google.longrunning.Operations.response field if the + operation was successful. 
+ + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + errors_config (google.cloud.retail_v2alpha.types.ExportErrorsConfig): + This field is never set. + output_result (google.cloud.retail_v2alpha.types.OutputResult): + Output result indicating where the data were + exported to. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + errors_config: "ExportErrorsConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ExportErrorsConfig", + ) + output_result: "OutputResult" = proto.Field( + proto.MESSAGE, + number=3, + message="OutputResult", + ) + + class OutputResult(proto.Message): r"""Output result that stores the information about where the exported data is stored. diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py index 1b18036a3c66..b5ddc405b44c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/__init__.py @@ -18,6 +18,10 @@ __version__ = package_version.__version__ +from .services.analytics_service import ( + AnalyticsServiceAsyncClient, + AnalyticsServiceClient, +) from .services.catalog_service import CatalogServiceAsyncClient, CatalogServiceClient from .services.completion_service import ( CompletionServiceAsyncClient, @@ -96,11 +100,14 @@ ) from .types.export_config import ( BigQueryOutputResult, + ExportAnalyticsMetricsRequest, + ExportAnalyticsMetricsResponse, ExportErrorsConfig, ExportMetadata, ExportProductsResponse, ExportUserEventsResponse, GcsOutputResult, + OutputConfig, OutputResult, ) from .types.import_config import ( @@ -194,6 +201,7 @@ ) __all__ = ( + "AnalyticsServiceAsyncClient", "CatalogServiceAsyncClient", "CompletionServiceAsyncClient", 
"ControlServiceAsyncClient", @@ -211,6 +219,7 @@ "AddLocalInventoriesMetadata", "AddLocalInventoriesRequest", "AddLocalInventoriesResponse", + "AnalyticsServiceClient", "AttributeConfigLevel", "AttributesConfig", "Audience", @@ -243,6 +252,8 @@ "DeleteProductRequest", "DeleteServingConfigRequest", "ExperimentInfo", + "ExportAnalyticsMetricsRequest", + "ExportAnalyticsMetricsResponse", "ExportErrorsConfig", "ExportMetadata", "ExportProductsResponse", @@ -284,6 +295,7 @@ "MerchantCenterLinkingConfig", "Model", "ModelServiceClient", + "OutputConfig", "OutputResult", "PauseModelRequest", "PredictRequest", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json index 657890025fc8..39759c99190c 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_metadata.json @@ -5,6 +5,40 @@ "protoPackage": "google.cloud.retail.v2beta", "schema": "1.0", "services": { + "AnalyticsService": { + "clients": { + "grpc": { + "libraryClient": "AnalyticsServiceClient", + "rpcs": { + "ExportAnalyticsMetrics": { + "methods": [ + "export_analytics_metrics" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AnalyticsServiceAsyncClient", + "rpcs": { + "ExportAnalyticsMetrics": { + "methods": [ + "export_analytics_metrics" + ] + } + } + }, + "rest": { + "libraryClient": "AnalyticsServiceClient", + "rpcs": { + "ExportAnalyticsMetrics": { + "methods": [ + "export_analytics_metrics" + ] + } + } + } + } + }, "CatalogService": { "clients": { "grpc": { diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py index 3293978284a9..1d1b45f2c68a 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py +++ 
b/packages/google-cloud-retail/google/cloud/retail_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/__init__.py new file mode 100644 index 000000000000..a4564e6dcc53 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AnalyticsServiceAsyncClient +from .client import AnalyticsServiceClient + +__all__ = ( + "AnalyticsServiceClient", + "AnalyticsServiceAsyncClient", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py new file mode 100644 index 000000000000..efd18fe72dac --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/async_client.py @@ -0,0 +1,459 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2beta.types import export_config + +from .client import AnalyticsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport +from .transports.grpc_asyncio import AnalyticsServiceGrpcAsyncIOTransport + + +class AnalyticsServiceAsyncClient: + """Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. 
+ """ + + _client: AnalyticsServiceClient + + DEFAULT_ENDPOINT = AnalyticsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AnalyticsServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + AnalyticsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AnalyticsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AnalyticsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AnalyticsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AnalyticsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AnalyticsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(AnalyticsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + AnalyticsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(AnalyticsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + AnalyticsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceAsyncClient: The constructed client. + """ + return AnalyticsServiceClient.from_service_account_info.__func__(AnalyticsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceAsyncClient: The constructed client. + """ + return AnalyticsServiceClient.from_service_account_file.__func__(AnalyticsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return AnalyticsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AnalyticsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AnalyticsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AnalyticsServiceClient).get_transport_class, type(AnalyticsServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AnalyticsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the analytics service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AnalyticsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AnalyticsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def export_analytics_metrics( + self, + request: Optional[ + Union[export_config.ExportAnalyticsMetricsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2beta + + async def sample_export_analytics_metrics(): + # Create a client + client = retail_v2beta.AnalyticsServiceAsyncClient() + + # Initialize request argument(s) + output_config = retail_v2beta.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2beta.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.retail_v2beta.types.ExportAnalyticsMetricsRequest, dict]]): + The request object. Request message for the ``ExportAnalyticsMetrics`` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2beta.types.ExportAnalyticsMetricsResponse` Response of the ExportAnalyticsMetricsRequest. If the long running + operation was successful, then this message is + returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + request = export_config.ExportAnalyticsMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_analytics_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("catalog", request.catalog),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + export_config.ExportAnalyticsMetricsResponse, + metadata_type=export_config.ExportMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "AnalyticsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AnalyticsServiceAsyncClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py new file mode 100644 index 000000000000..4691ae34decb --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/client.py @@ -0,0 +1,666 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2beta.types import export_config + +from .transports.base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport +from .transports.grpc import AnalyticsServiceGrpcTransport +from .transports.grpc_asyncio import AnalyticsServiceGrpcAsyncIOTransport +from .transports.rest import AnalyticsServiceRestTransport + + +class AnalyticsServiceClientMeta(type): + """Metaclass for the AnalyticsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AnalyticsServiceTransport]] + _transport_registry["grpc"] = AnalyticsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AnalyticsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AnalyticsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AnalyticsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AnalyticsServiceClient(metaclass=AnalyticsServiceClientMeta): + """Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "retail.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AnalyticsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AnalyticsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AnalyticsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AnalyticsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the analytics service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, AnalyticsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AnalyticsServiceTransport): + # transport is a AnalyticsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def export_analytics_metrics( + self, + request: Optional[ + Union[export_config.ExportAnalyticsMetricsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import retail_v2beta + + def sample_export_analytics_metrics(): + # Create a client + client = retail_v2beta.AnalyticsServiceClient() + + # Initialize request argument(s) + output_config = retail_v2beta.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2beta.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.retail_v2beta.types.ExportAnalyticsMetricsRequest, dict]): + The request object. Request message for the ``ExportAnalyticsMetrics`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.retail_v2beta.types.ExportAnalyticsMetricsResponse` Response of the ExportAnalyticsMetricsRequest. If the long running + operation was successful, then this message is + returned by the + google.longrunning.Operations.response field if the + operation was successful. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a export_config.ExportAnalyticsMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, export_config.ExportAnalyticsMetricsRequest): + request = export_config.ExportAnalyticsMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_analytics_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("catalog", request.catalog),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + export_config.ExportAnalyticsMetricsResponse, + metadata_type=export_config.ExportMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AnalyticsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AnalyticsServiceClient",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/__init__.py new file mode 100644 index 000000000000..478813851631 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AnalyticsServiceTransport +from .grpc import AnalyticsServiceGrpcTransport +from .grpc_asyncio import AnalyticsServiceGrpcAsyncIOTransport +from .rest import AnalyticsServiceRestInterceptor, AnalyticsServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AnalyticsServiceTransport]] +_transport_registry["grpc"] = AnalyticsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AnalyticsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AnalyticsServiceRestTransport + +__all__ = ( + "AnalyticsServiceTransport", + "AnalyticsServiceGrpcTransport", + "AnalyticsServiceGrpcAsyncIOTransport", + "AnalyticsServiceRestTransport", + "AnalyticsServiceRestInterceptor", +) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/base.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/base.py new file mode 100644 index 000000000000..467dc2b428bf --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/base.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.retail_v2beta import gapic_version as package_version +from google.cloud.retail_v2beta.types import export_config + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AnalyticsServiceTransport(abc.ABC): + """Abstract transport class for AnalyticsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "retail.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.export_analytics_metrics: gapic_v1.method.wrap_method( + self.export_analytics_metrics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AnalyticsServiceTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/grpc.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/grpc.py new file mode 100644 index 
000000000000..4c52a7fcc166 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/grpc.py @@ -0,0 +1,326 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.retail_v2beta.types import export_config + +from .base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport + + +class AnalyticsServiceGrpcTransport(AnalyticsServiceTransport): + """gRPC backend transport for AnalyticsService. + + Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], operations_pb2.Operation + ]: + r"""Return a callable for the export analytics metrics method over gRPC. + + Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + Returns: + Callable[[~.ExportAnalyticsMetricsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_analytics_metrics" not in self._stubs: + self._stubs["export_analytics_metrics"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2beta.AnalyticsService/ExportAnalyticsMetrics", + request_serializer=export_config.ExportAnalyticsMetricsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_analytics_metrics"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AnalyticsServiceGrpcTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/grpc_asyncio.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..7b7ab93f9f4e --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/grpc_asyncio.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.retail_v2beta.types import export_config + +from .base import DEFAULT_CLIENT_INFO, AnalyticsServiceTransport +from .grpc import AnalyticsServiceGrpcTransport + + +class AnalyticsServiceGrpcAsyncIOTransport(AnalyticsServiceTransport): + """gRPC AsyncIO backend transport for AnalyticsService. + + Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the export analytics metrics method over gRPC. + + Exports analytics metrics. + + ``Operation.response`` is of type + ``ExportAnalyticsMetricsResponse``. ``Operation.metadata`` is of + type ``ExportMetadata``. + + Returns: + Callable[[~.ExportAnalyticsMetricsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_analytics_metrics" not in self._stubs: + self._stubs["export_analytics_metrics"] = self.grpc_channel.unary_unary( + "/google.cloud.retail.v2beta.AnalyticsService/ExportAnalyticsMetrics", + request_serializer=export_config.ExportAnalyticsMetricsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_analytics_metrics"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("AnalyticsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/rest.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/rest.py new file mode 100644 index 000000000000..13589fdaf7d5 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/services/analytics_service/transports/rest.py @@ -0,0 +1,587 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.retail_v2beta.types import export_config + +from .base import AnalyticsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AnalyticsServiceRestInterceptor: + """Interceptor for AnalyticsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AnalyticsServiceRestTransport. + + .. code-block:: python + class MyCustomAnalyticsServiceInterceptor(AnalyticsServiceRestInterceptor): + def pre_export_analytics_metrics(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_analytics_metrics(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AnalyticsServiceRestTransport(interceptor=MyCustomAnalyticsServiceInterceptor()) + client = AnalyticsServiceClient(transport=transport) + + + """ + + def pre_export_analytics_metrics( + self, + request: export_config.ExportAnalyticsMetricsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[export_config.ExportAnalyticsMetricsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_analytics_metrics + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsService server. + """ + return request, metadata + + def post_export_analytics_metrics( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_analytics_metrics + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AnalyticsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AnalyticsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AnalyticsServiceRestInterceptor + + +class AnalyticsServiceRestTransport(AnalyticsServiceTransport): + """REST backend transport for AnalyticsService. + + Service for managing & accessing retail search business + metric. Retail recommendation business metric is currently not + available. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "retail.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AnalyticsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AnalyticsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v2beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ExportAnalyticsMetrics(AnalyticsServiceRestStub): + def __hash__(self): + return hash("ExportAnalyticsMetrics") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: export_config.ExportAnalyticsMetricsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export analytics metrics method over HTTP. 
+ + Args: + request (~.export_config.ExportAnalyticsMetricsRequest): + The request object. Request message for the ``ExportAnalyticsMetrics`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta/{catalog=projects/*/locations/*/catalogs/*}:exportAnalyticsMetrics", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_analytics_metrics( + request, metadata + ) + pb_request = export_config.ExportAnalyticsMetricsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_analytics_metrics(resp) + return resp + + @property + def export_analytics_metrics( + self, + ) -> Callable[ + [export_config.ExportAnalyticsMetricsRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportAnalyticsMetrics(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AnalyticsServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/catalogs/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/catalogs/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AnalyticsServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*/catalogs/*}/operations", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v2beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AnalyticsServiceRestTransport",) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py index 7d5438443c8b..f4cf109542b5 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/__init__.py @@ -70,11 +70,14 @@ ) from .export_config import ( BigQueryOutputResult, + ExportAnalyticsMetricsRequest, + ExportAnalyticsMetricsResponse, ExportErrorsConfig, ExportMetadata, ExportProductsResponse, ExportUserEventsResponse, GcsOutputResult, + OutputConfig, OutputResult, ) from .import_config import ( @@ -208,11 +211,14 @@ "ListControlsResponse", "UpdateControlRequest", "BigQueryOutputResult", + "ExportAnalyticsMetricsRequest", + "ExportAnalyticsMetricsResponse", "ExportErrorsConfig", "ExportMetadata", "ExportProductsResponse", "ExportUserEventsResponse", "GcsOutputResult", + "OutputConfig", "OutputResult", "BigQuerySource", "CompletionDataInputConfig", diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/analytics_service.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/analytics_service.py new file mode 100644 index 000000000000..99089a2d9d86 --- /dev/null +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/analytics_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.retail.v2beta", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/export_config.py b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/export_config.py index f9c69b039c33..b79dc4fd1874 100644 --- a/packages/google-cloud-retail/google/cloud/retail_v2beta/types/export_config.py +++ b/packages/google-cloud-retail/google/cloud/retail_v2beta/types/export_config.py @@ -24,10 +24,13 @@ __protobuf__ = proto.module( package="google.cloud.retail.v2beta", manifest={ + "OutputConfig", "ExportErrorsConfig", + "ExportAnalyticsMetricsRequest", "ExportMetadata", "ExportProductsResponse", "ExportUserEventsResponse", + "ExportAnalyticsMetricsResponse", "OutputResult", "BigQueryOutputResult", "GcsOutputResult", @@ -35,6 +38,93 @@ ) +class OutputConfig(proto.Message): + r"""The output configuration setting. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_destination (google.cloud.retail_v2beta.types.OutputConfig.GcsDestination): + The Google Cloud Storage location where the + output is to be written to. + + This field is a member of `oneof`_ ``destination``. 
+ bigquery_destination (google.cloud.retail_v2beta.types.OutputConfig.BigQueryDestination): + The BigQuery location where the output is to + be written to. + + This field is a member of `oneof`_ ``destination``. + """ + + class GcsDestination(proto.Message): + r"""The Google Cloud Storage output destination configuration. + + Attributes: + output_uri_prefix (str): + Required. The output uri prefix for saving output data to + json files. Some mapping examples are as follows: + output_uri_prefix sample output(assuming the object is + foo.json) ======================== + ============================================= gs://bucket/ + gs://bucket/foo.json gs://bucket/folder/ + gs://bucket/folder/foo.json gs://bucket/folder/item\_ + gs://bucket/folder/item_foo.json + """ + + output_uri_prefix: str = proto.Field( + proto.STRING, + number=1, + ) + + class BigQueryDestination(proto.Message): + r"""The BigQuery output destination configuration. + + Attributes: + dataset_id (str): + Required. The ID of a BigQuery Dataset. + table_id_prefix (str): + Required. The prefix of exported BigQuery + tables. + table_type (str): + Required. Describes the table type. The following values are + supported: + + - ``table``: A BigQuery native table. + - ``view``: A virtual table defined by a SQL query. + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + table_id_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + table_type: str = proto.Field( + proto.STRING, + number=3, + ) + + gcs_destination: GcsDestination = proto.Field( + proto.MESSAGE, + number=1, + oneof="destination", + message=GcsDestination, + ) + bigquery_destination: BigQueryDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="destination", + message=BigQueryDestination, + ) + + class ExportErrorsConfig(proto.Message): r"""Configuration of destination for Export related errors. 
@@ -57,6 +147,52 @@ class ExportErrorsConfig(proto.Message): ) +class ExportAnalyticsMetricsRequest(proto.Message): + r"""Request message for the ``ExportAnalyticsMetrics`` method. + + Attributes: + catalog (str): + Required. Full resource name of the parent catalog. Expected + format: ``projects/*/locations/*/catalogs/*`` + output_config (google.cloud.retail_v2beta.types.OutputConfig): + Required. The output location of the data. + filter (str): + A filtering expression to specify restrictions on returned + metrics. The expression is a sequence of terms. Each term + applies a restriction to the returned metrics. Use this + expression to restrict results to a specific time range. + + Currently we expect only one types of fields: + + :: + + * `timestamp`: This can be specified twice, once with a + less than operator and once with a greater than operator. The + `timestamp` restriction should result in one, contiguous, valid, + `timestamp` range. + + Some examples of valid filters expressions: + + - Example 1: + ``timestamp > "2012-04-23T18:25:43.511Z" timestamp < "2012-04-23T18:30:43.511Z"`` + - Example 2: ``timestamp > "2012-04-23T18:25:43.511Z"`` + """ + + catalog: str = proto.Field( + proto.STRING, + number=1, + ) + output_config: "OutputConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="OutputConfig", + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + class ExportMetadata(proto.Message): r"""Metadata related to the progress of the Export operation. This is returned by the google.longrunning.Operation.metadata @@ -150,6 +286,40 @@ class ExportUserEventsResponse(proto.Message): ) +class ExportAnalyticsMetricsResponse(proto.Message): + r"""Response of the ExportAnalyticsMetricsRequest. If the long + running operation was successful, then this message is returned + by the google.longrunning.Operations.response field if the + operation was successful. 
+ + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + errors_config (google.cloud.retail_v2beta.types.ExportErrorsConfig): + This field is never set. + output_result (google.cloud.retail_v2beta.types.OutputResult): + Output result indicating where the data were + exported to. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + errors_config: "ExportErrorsConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ExportErrorsConfig", + ) + output_result: "OutputResult" = proto.Field( + proto.MESSAGE, + number=3, + message="OutputResult", + ) + + class OutputResult(proto.Message): r"""Output result that stores the information about where the exported data is stored. diff --git a/packages/google-cloud-retail/noxfile.py b/packages/google-cloud-retail/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-retail/noxfile.py +++ b/packages/google-cloud-retail/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_analytics_service_export_analytics_metrics_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_analytics_service_export_analytics_metrics_async.py new file mode 100644 index 000000000000..3e1e19784ab9 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_analytics_service_export_analytics_metrics_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAnalyticsMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_AnalyticsService_ExportAnalyticsMetrics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +async def sample_export_analytics_metrics(): + # Create a client + client = retail_v2alpha.AnalyticsServiceAsyncClient() + + # Initialize request argument(s) + output_config = retail_v2alpha.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2alpha.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_AnalyticsService_ExportAnalyticsMetrics_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_analytics_service_export_analytics_metrics_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_analytics_service_export_analytics_metrics_sync.py new file mode 100644 index 000000000000..29fb638de768 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2alpha_generated_analytics_service_export_analytics_metrics_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAnalyticsMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2alpha_generated_AnalyticsService_ExportAnalyticsMetrics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2alpha + + +def sample_export_analytics_metrics(): + # Create a client + client = retail_v2alpha.AnalyticsServiceClient() + + # Initialize request argument(s) + output_config = retail_v2alpha.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2alpha.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END retail_v2alpha_generated_AnalyticsService_ExportAnalyticsMetrics_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_analytics_service_export_analytics_metrics_async.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_analytics_service_export_analytics_metrics_async.py new file mode 100644 index 000000000000..43fe5c8836c9 --- 
/dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_analytics_service_export_analytics_metrics_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAnalyticsMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2beta_generated_AnalyticsService_ExportAnalyticsMetrics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2beta + + +async def sample_export_analytics_metrics(): + # Create a client + client = retail_v2beta.AnalyticsServiceAsyncClient() + + # Initialize request argument(s) + output_config = retail_v2beta.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2beta.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END retail_v2beta_generated_AnalyticsService_ExportAnalyticsMetrics_async] diff --git a/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_analytics_service_export_analytics_metrics_sync.py b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_analytics_service_export_analytics_metrics_sync.py new file mode 100644 index 000000000000..ce0cebc899f1 --- /dev/null +++ b/packages/google-cloud-retail/samples/generated_samples/retail_v2beta_generated_analytics_service_export_analytics_metrics_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAnalyticsMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-retail + + +# [START retail_v2beta_generated_AnalyticsService_ExportAnalyticsMetrics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import retail_v2beta + + +def sample_export_analytics_metrics(): + # Create a client + client = retail_v2beta.AnalyticsServiceClient() + + # Initialize request argument(s) + output_config = retail_v2beta.OutputConfig() + output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" + + request = retail_v2beta.ExportAnalyticsMetricsRequest( + catalog="catalog_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_analytics_metrics(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END retail_v2beta_generated_AnalyticsService_ExportAnalyticsMetrics_sync] diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json index 729d9bf0f4f5..1a226fec4488 100644 --- 
a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "1.17.0" + "version": "1.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json index d5dada4605b5..48ffb9b4b586 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2alpha.json @@ -8,9 +8,162 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "1.17.0" + "version": "1.18.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2alpha.AnalyticsServiceAsyncClient", + "shortName": "AnalyticsServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2alpha.AnalyticsServiceAsyncClient.export_analytics_metrics", + "method": { + "fullName": "google.cloud.retail.v2alpha.AnalyticsService.ExportAnalyticsMetrics", + "service": { + "fullName": "google.cloud.retail.v2alpha.AnalyticsService", + "shortName": "AnalyticsService" + }, + "shortName": "ExportAnalyticsMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ExportAnalyticsMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_analytics_metrics" + }, + "description": "Sample for 
ExportAnalyticsMetrics", + "file": "retail_v2alpha_generated_analytics_service_export_analytics_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_AnalyticsService_ExportAnalyticsMetrics_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_analytics_service_export_analytics_metrics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2alpha.AnalyticsServiceClient", + "shortName": "AnalyticsServiceClient" + }, + "fullName": "google.cloud.retail_v2alpha.AnalyticsServiceClient.export_analytics_metrics", + "method": { + "fullName": "google.cloud.retail.v2alpha.AnalyticsService.ExportAnalyticsMetrics", + "service": { + "fullName": "google.cloud.retail.v2alpha.AnalyticsService", + "shortName": "AnalyticsService" + }, + "shortName": "ExportAnalyticsMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2alpha.types.ExportAnalyticsMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_analytics_metrics" + }, + "description": "Sample for ExportAnalyticsMetrics", + "file": "retail_v2alpha_generated_analytics_service_export_analytics_metrics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2alpha_generated_AnalyticsService_ExportAnalyticsMetrics_sync", + "segments": [ 
+ { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2alpha_generated_analytics_service_export_analytics_metrics_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json index 0946b5ba7e7f..20c3915bc1ef 100644 --- a/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json +++ b/packages/google-cloud-retail/samples/generated_samples/snippet_metadata_google.cloud.retail.v2beta.json @@ -8,9 +8,162 @@ ], "language": "PYTHON", "name": "google-cloud-retail", - "version": "1.17.0" + "version": "1.18.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.retail_v2beta.AnalyticsServiceAsyncClient", + "shortName": "AnalyticsServiceAsyncClient" + }, + "fullName": "google.cloud.retail_v2beta.AnalyticsServiceAsyncClient.export_analytics_metrics", + "method": { + "fullName": "google.cloud.retail.v2beta.AnalyticsService.ExportAnalyticsMetrics", + "service": { + "fullName": "google.cloud.retail.v2beta.AnalyticsService", + "shortName": "AnalyticsService" + }, + "shortName": "ExportAnalyticsMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2beta.types.ExportAnalyticsMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } 
+ ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_analytics_metrics" + }, + "description": "Sample for ExportAnalyticsMetrics", + "file": "retail_v2beta_generated_analytics_service_export_analytics_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2beta_generated_AnalyticsService_ExportAnalyticsMetrics_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2beta_generated_analytics_service_export_analytics_metrics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.retail_v2beta.AnalyticsServiceClient", + "shortName": "AnalyticsServiceClient" + }, + "fullName": "google.cloud.retail_v2beta.AnalyticsServiceClient.export_analytics_metrics", + "method": { + "fullName": "google.cloud.retail.v2beta.AnalyticsService.ExportAnalyticsMetrics", + "service": { + "fullName": "google.cloud.retail.v2beta.AnalyticsService", + "shortName": "AnalyticsService" + }, + "shortName": "ExportAnalyticsMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.retail_v2beta.types.ExportAnalyticsMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_analytics_metrics" + }, + "description": "Sample for ExportAnalyticsMetrics", + "file": "retail_v2beta_generated_analytics_service_export_analytics_metrics_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "retail_v2beta_generated_AnalyticsService_ExportAnalyticsMetrics_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "retail_v2beta_generated_analytics_service_export_analytics_metrics_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py b/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py index 5d23fabb7314..323a720cedd7 100644 --- a/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py +++ b/packages/google-cloud-retail/scripts/fixup_retail_v2alpha_keywords.py @@ -56,6 +56,7 @@ class retailCallTransformer(cst.CSTTransformer): 'delete_model': ('name', ), 'delete_product': ('name', 'force', ), 'delete_serving_config': ('name', ), + 'export_analytics_metrics': ('catalog', 'output_config', 'filter', ), 'get_attributes_config': ('name', ), 'get_completion_config': ('name', ), 'get_control': ('name', ), diff --git a/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py b/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py index 4610d750569a..395322fdf0e3 100644 --- a/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py +++ b/packages/google-cloud-retail/scripts/fixup_retail_v2beta_keywords.py @@ -54,6 +54,7 @@ class retailCallTransformer(cst.CSTTransformer): 'delete_model': ('name', ), 'delete_product': ('name', ), 'delete_serving_config': ('name', ), + 'export_analytics_metrics': ('catalog', 'output_config', 'filter', ), 'get_attributes_config': ('name', ), 'get_completion_config': 
('name', ), 'get_control': ('name', ), diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py new file mode 100644 index 000000000000..8f37498c162b --- /dev/null +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2alpha/test_analytics_service.py @@ -0,0 +1,2318 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.retail_v2alpha.services.analytics_service import ( + AnalyticsServiceAsyncClient, + AnalyticsServiceClient, + transports, +) +from google.cloud.retail_v2alpha.types import export_config + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AnalyticsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AnalyticsServiceClient, "grpc"), + (AnalyticsServiceAsyncClient, "grpc_asyncio"), + (AnalyticsServiceClient, "rest"), + ], +) +def test_analytics_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.AnalyticsServiceGrpcTransport, "grpc"), + (transports.AnalyticsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AnalyticsServiceRestTransport, "rest"), + ], +) +def test_analytics_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AnalyticsServiceClient, "grpc"), + (AnalyticsServiceAsyncClient, "grpc_asyncio"), + (AnalyticsServiceClient, "rest"), + ], +) +def test_analytics_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +def test_analytics_service_client_get_transport_class(): + transport = 
AnalyticsServiceClient.get_transport_class() + available_transports = [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceRestTransport, + ] + assert transport in available_transports + + transport = AnalyticsServiceClient.get_transport_class("grpc") + assert transport == transports.AnalyticsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AnalyticsServiceClient, transports.AnalyticsServiceGrpcTransport, "grpc"), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AnalyticsServiceClient, transports.AnalyticsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + AnalyticsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceClient), +) +@mock.patch.object( + AnalyticsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceAsyncClient), +) +def test_analytics_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AnalyticsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AnalyticsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AnalyticsServiceClient, + transports.AnalyticsServiceRestTransport, + "rest", + "true", + ), + ( + AnalyticsServiceClient, + transports.AnalyticsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AnalyticsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceClient), +) +@mock.patch.object( + AnalyticsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_analytics_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AnalyticsServiceClient, AnalyticsServiceAsyncClient] +) +@mock.patch.object( + AnalyticsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceClient), +) +@mock.patch.object( + AnalyticsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceAsyncClient), +) +def test_analytics_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AnalyticsServiceClient, transports.AnalyticsServiceGrpcTransport, "grpc"), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AnalyticsServiceClient, transports.AnalyticsServiceRestTransport, "rest"), + ], +) +def test_analytics_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AnalyticsServiceClient, + transports.AnalyticsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_analytics_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_analytics_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.retail_v2alpha.services.analytics_service.transports.AnalyticsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AnalyticsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_analytics_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + export_config.ExportAnalyticsMetricsRequest, + dict, + ], +) +def test_export_analytics_metrics(request_type, transport: str = "grpc"): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == export_config.ExportAnalyticsMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_analytics_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + client.export_analytics_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == export_config.ExportAnalyticsMetricsRequest() + + +@pytest.mark.asyncio +async def test_export_analytics_metrics_async( + transport: str = "grpc_asyncio", + request_type=export_config.ExportAnalyticsMetricsRequest, +): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == export_config.ExportAnalyticsMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_analytics_metrics_async_from_dict(): + await test_export_analytics_metrics_async(request_type=dict) + + +def test_export_analytics_metrics_field_headers(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = export_config.ExportAnalyticsMetricsRequest() + + request.catalog = "catalog_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "catalog=catalog_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_analytics_metrics_field_headers_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = export_config.ExportAnalyticsMetricsRequest() + + request.catalog = "catalog_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "catalog=catalog_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + export_config.ExportAnalyticsMetricsRequest, + dict, + ], +) +def test_export_analytics_metrics_rest(request_type): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"catalog": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_analytics_metrics(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_analytics_metrics_rest_required_fields( + request_type=export_config.ExportAnalyticsMetricsRequest, +): + transport_class = transports.AnalyticsServiceRestTransport + + request_init = {} + request_init["catalog"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_analytics_metrics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["catalog"] = "catalog_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_analytics_metrics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "catalog" in jsonified_request + assert jsonified_request["catalog"] == "catalog_value" + + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_analytics_metrics(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_analytics_metrics_rest_unset_required_fields(): + transport = transports.AnalyticsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_analytics_metrics._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "catalog", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_analytics_metrics_rest_interceptors(null_interceptor): + transport = transports.AnalyticsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsServiceRestInterceptor(), + ) + client = AnalyticsServiceClient(transport=transport) + with mock.patch.object( 
+ type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AnalyticsServiceRestInterceptor, "post_export_analytics_metrics" + ) as post, mock.patch.object( + transports.AnalyticsServiceRestInterceptor, "pre_export_analytics_metrics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = export_config.ExportAnalyticsMetricsRequest.pb( + export_config.ExportAnalyticsMetricsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = export_config.ExportAnalyticsMetricsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_analytics_metrics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_analytics_metrics_rest_bad_request( + transport: str = "rest", request_type=export_config.ExportAnalyticsMetricsRequest +): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"catalog": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_analytics_metrics(request) + + +def test_export_analytics_metrics_rest_error(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AnalyticsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AnalyticsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + transports.AnalyticsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AnalyticsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AnalyticsServiceGrpcTransport, + ) + + +def test_analytics_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AnalyticsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_analytics_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.retail_v2alpha.services.analytics_service.transports.AnalyticsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AnalyticsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "export_analytics_metrics", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_analytics_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.retail_v2alpha.services.analytics_service.transports.AnalyticsServiceTransport._prep_wrapped_messages" + ) as Transport: + 
Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AnalyticsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_analytics_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.retail_v2alpha.services.analytics_service.transports.AnalyticsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AnalyticsServiceTransport() + adc.assert_called_once() + + +def test_analytics_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AnalyticsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + transports.AnalyticsServiceRestTransport, + ], +) +def test_analytics_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AnalyticsServiceGrpcTransport, grpc_helpers), + (transports.AnalyticsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_analytics_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_analytics_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AnalyticsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_analytics_service_rest_lro_client(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_analytics_service_host_no_port(transport_name): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_analytics_service_host_with_port(transport_name): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_analytics_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AnalyticsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AnalyticsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.export_analytics_metrics._session + session2 = client2.transport.export_analytics_metrics._session + assert session1 != session2 + + +def test_analytics_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AnalyticsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_analytics_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AnalyticsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_analytics_service_grpc_lro_client(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have 
a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_analytics_service_grpc_lro_async_client(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AnalyticsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AnalyticsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AnalyticsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AnalyticsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AnalyticsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AnalyticsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AnalyticsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AnalyticsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = AnalyticsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AnalyticsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AnalyticsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AnalyticsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AnalyticsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AnalyticsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AnalyticsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AnalyticsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/catalogs/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AnalyticsServiceClient, transports.AnalyticsServiceGrpcTransport), + (AnalyticsServiceAsyncClient, transports.AnalyticsServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py new file mode 100644 index 000000000000..da93e9c8b075 --- /dev/null +++ b/packages/google-cloud-retail/tests/unit/gapic/retail_v2beta/test_analytics_service.py @@ -0,0 +1,2318 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.retail_v2beta.services.analytics_service import ( + AnalyticsServiceAsyncClient, + AnalyticsServiceClient, + transports, +) +from google.cloud.retail_v2beta.types import export_config + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AnalyticsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AnalyticsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AnalyticsServiceClient, "grpc"), + (AnalyticsServiceAsyncClient, "grpc_asyncio"), + (AnalyticsServiceClient, "rest"), + ], +) +def test_analytics_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AnalyticsServiceGrpcTransport, "grpc"), + (transports.AnalyticsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AnalyticsServiceRestTransport, "rest"), + ], +) +def test_analytics_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AnalyticsServiceClient, "grpc"), + (AnalyticsServiceAsyncClient, "grpc_asyncio"), + (AnalyticsServiceClient, "rest"), + ], +) +def test_analytics_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + 
+def test_analytics_service_client_get_transport_class(): + transport = AnalyticsServiceClient.get_transport_class() + available_transports = [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceRestTransport, + ] + assert transport in available_transports + + transport = AnalyticsServiceClient.get_transport_class("grpc") + assert transport == transports.AnalyticsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AnalyticsServiceClient, transports.AnalyticsServiceGrpcTransport, "grpc"), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AnalyticsServiceClient, transports.AnalyticsServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + AnalyticsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceClient), +) +@mock.patch.object( + AnalyticsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceAsyncClient), +) +def test_analytics_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AnalyticsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AnalyticsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AnalyticsServiceClient, + transports.AnalyticsServiceRestTransport, + "rest", + "true", + ), + ( + AnalyticsServiceClient, + transports.AnalyticsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AnalyticsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceClient), +) +@mock.patch.object( + AnalyticsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_analytics_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AnalyticsServiceClient, AnalyticsServiceAsyncClient] +) +@mock.patch.object( + AnalyticsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceClient), +) +@mock.patch.object( + AnalyticsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AnalyticsServiceAsyncClient), +) +def test_analytics_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AnalyticsServiceClient, transports.AnalyticsServiceGrpcTransport, "grpc"), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AnalyticsServiceClient, transports.AnalyticsServiceRestTransport, "rest"), + ], +) +def test_analytics_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AnalyticsServiceClient, + transports.AnalyticsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_analytics_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_analytics_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.retail_v2beta.services.analytics_service.transports.AnalyticsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AnalyticsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AnalyticsServiceClient, + transports.AnalyticsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AnalyticsServiceAsyncClient, + transports.AnalyticsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_analytics_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + export_config.ExportAnalyticsMetricsRequest, + dict, + ], +) +def test_export_analytics_metrics(request_type, transport: str = "grpc"): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == export_config.ExportAnalyticsMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_analytics_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + client.export_analytics_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == export_config.ExportAnalyticsMetricsRequest() + + +@pytest.mark.asyncio +async def test_export_analytics_metrics_async( + transport: str = "grpc_asyncio", + request_type=export_config.ExportAnalyticsMetricsRequest, +): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == export_config.ExportAnalyticsMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_analytics_metrics_async_from_dict(): + await test_export_analytics_metrics_async(request_type=dict) + + +def test_export_analytics_metrics_field_headers(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = export_config.ExportAnalyticsMetricsRequest() + + request.catalog = "catalog_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "catalog=catalog_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_analytics_metrics_field_headers_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = export_config.ExportAnalyticsMetricsRequest() + + request.catalog = "catalog_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_analytics_metrics), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_analytics_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "catalog=catalog_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + export_config.ExportAnalyticsMetricsRequest, + dict, + ], +) +def test_export_analytics_metrics_rest(request_type): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"catalog": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_analytics_metrics(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_analytics_metrics_rest_required_fields( + request_type=export_config.ExportAnalyticsMetricsRequest, +): + transport_class = transports.AnalyticsServiceRestTransport + + request_init = {} + request_init["catalog"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_analytics_metrics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["catalog"] = "catalog_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_analytics_metrics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "catalog" in jsonified_request + assert jsonified_request["catalog"] == "catalog_value" + + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_analytics_metrics(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_analytics_metrics_rest_unset_required_fields(): + transport = transports.AnalyticsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_analytics_metrics._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "catalog", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_analytics_metrics_rest_interceptors(null_interceptor): + transport = transports.AnalyticsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsServiceRestInterceptor(), + ) + client = AnalyticsServiceClient(transport=transport) + with mock.patch.object( 
+ type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AnalyticsServiceRestInterceptor, "post_export_analytics_metrics" + ) as post, mock.patch.object( + transports.AnalyticsServiceRestInterceptor, "pre_export_analytics_metrics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = export_config.ExportAnalyticsMetricsRequest.pb( + export_config.ExportAnalyticsMetricsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = export_config.ExportAnalyticsMetricsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_analytics_metrics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_analytics_metrics_rest_bad_request( + transport: str = "rest", request_type=export_config.ExportAnalyticsMetricsRequest +): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"catalog": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_analytics_metrics(request) + + +def test_export_analytics_metrics_rest_error(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AnalyticsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AnalyticsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AnalyticsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AnalyticsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + transports.AnalyticsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AnalyticsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AnalyticsServiceGrpcTransport, + ) + + +def test_analytics_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AnalyticsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_analytics_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.retail_v2beta.services.analytics_service.transports.AnalyticsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AnalyticsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "export_analytics_metrics", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_analytics_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.retail_v2beta.services.analytics_service.transports.AnalyticsServiceTransport._prep_wrapped_messages" + ) as Transport: + 
Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AnalyticsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_analytics_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.retail_v2beta.services.analytics_service.transports.AnalyticsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AnalyticsServiceTransport() + adc.assert_called_once() + + +def test_analytics_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AnalyticsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + transports.AnalyticsServiceRestTransport, + ], +) +def test_analytics_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AnalyticsServiceGrpcTransport, grpc_helpers), + (transports.AnalyticsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_analytics_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "retail.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="retail.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_analytics_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AnalyticsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_analytics_service_rest_lro_client(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_analytics_service_host_no_port(transport_name): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_analytics_service_host_with_port(transport_name): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="retail.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "retail.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://retail.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_analytics_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AnalyticsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AnalyticsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.export_analytics_metrics._session + session2 = client2.transport.export_analytics_metrics._session + assert session1 != session2 + + +def test_analytics_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AnalyticsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_analytics_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AnalyticsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AnalyticsServiceGrpcTransport, + transports.AnalyticsServiceGrpcAsyncIOTransport, + ], +) +def test_analytics_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_analytics_service_grpc_lro_client(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have 
a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_analytics_service_grpc_lro_async_client(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AnalyticsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AnalyticsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AnalyticsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AnalyticsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AnalyticsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AnalyticsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AnalyticsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AnalyticsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = AnalyticsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AnalyticsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AnalyticsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AnalyticsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AnalyticsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AnalyticsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AnalyticsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AnalyticsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/catalogs/sample3/branches/sample4/operations/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/catalogs/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/catalogs/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AnalyticsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AnalyticsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AnalyticsServiceClient, transports.AnalyticsServiceGrpcTransport), + (AnalyticsServiceAsyncClient, transports.AnalyticsServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-securitycentermanagement/CHANGELOG.md b/packages/google-cloud-securitycentermanagement/CHANGELOG.md index 43c9be277472..4ea2e5eafa3b 100644 --- a/packages/google-cloud-securitycentermanagement/CHANGELOG.md +++ b/packages/google-cloud-securitycentermanagement/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.2...google-cloud-securitycentermanagement-v0.1.3) (2024-01-19) + + +### Documentation + +* [google-cloud-securitycentermanagement] update documentation for UpdateSecurityHealthAnalyticsCustomModule update_mask field ([#12196](https://github.com/googleapis/google-cloud-python/issues/12196)) ([c7cf0a1](https://github.com/googleapis/google-cloud-python/commit/c7cf0a1c754091fb5b141dd7a9238c63f9d1f36e)) + 
+## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.1...google-cloud-securitycentermanagement-v0.1.2) (2024-01-08) + + +### Documentation + +* [google-cloud-securitycentermanagement] updates on multiple comments, syncing terminology and clarifying some aspects ([#12151](https://github.com/googleapis/google-cloud-python/issues/12151)) ([461c76b](https://github.com/googleapis/google-cloud-python/commit/461c76bbc6bd7cda3ef6da0a0ec7e2418c1532aa)) + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-securitycentermanagement-v0.1.0...google-cloud-securitycentermanagement-v0.1.1) (2024-01-04) + + +### Documentation + +* [google-cloud-securitycentermanagement] clarify several RPC descriptions ([#12146](https://github.com/googleapis/google-cloud-python/issues/12146)) ([a7e4920](https://github.com/googleapis/google-cloud-python/commit/a7e492084f88c72d77127d6adf9feb537362ca18)) + ## 0.1.0 (2023-12-07) diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py index a7d39deb7a45..536d6648a6f0 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py index a7d39deb7a45..536d6648a6f0 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py index 1b8751f438b4..757fa3ca3f03 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/async_client.py @@ -1140,13 +1140,11 @@ async def sample_update_security_health_analytics_custom_module(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to specify the fields to be - overwritten in the SecurityHealthAnalyticsCustomModule - resource by the update. The fields specified in the - update_mask are relative to the resource, not the full - request. A field will be overwritten if it is in the - mask. 
If the user does not provide a mask then all - fields will be overwritten. + Required. The list of fields to be updated. The only + fields that can be updated are ``enablement_state`` and + ``custom_config``. If empty or set to the wildcard value + ``*``, both ``enablement_state`` and ``custom_config`` + are updated. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py index a2bdb1391fa4..7f80e3cbc3e1 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/services/security_center_management/client.py @@ -1408,13 +1408,11 @@ def sample_update_security_health_analytics_custom_module(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the SecurityHealthAnalyticsCustomModule - resource by the update. The fields specified in the - update_mask are relative to the resource, not the full - request. A field will be overwritten if it is in the - mask. If the user does not provide a mask then all - fields will be overwritten. + Required. The list of fields to be updated. The only + fields that can be updated are ``enablement_state`` and + ``custom_config``. If empty or set to the wildcard value + ``*``, both ``enablement_state`` and ``custom_config`` + are updated. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py index d748fa095475..6cb1492a0437 100644 --- a/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py +++ b/packages/google-cloud-securitycentermanagement/google/cloud/securitycentermanagement_v1/types/security_center_management.py @@ -673,13 +673,11 @@ class UpdateSecurityHealthAnalyticsCustomModuleRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the SecurityHealthAnalyticsCustomModule - resource by the update. The fields specified in the - update_mask are relative to the resource, not the full - request. A field will be overwritten if it is in the mask. - If the user does not provide a mask then all fields will be - overwritten. + Required. The list of fields to be updated. The only fields + that can be updated are ``enablement_state`` and + ``custom_config``. If empty or set to the wildcard value + ``*``, both ``enablement_state`` and ``custom_config`` are + updated. security_health_analytics_custom_module (google.cloud.securitycentermanagement_v1.types.SecurityHealthAnalyticsCustomModule): Required. 
The resource being updated validate_only (bool): diff --git a/packages/google-cloud-securitycentermanagement/noxfile.py b/packages/google-cloud-securitycentermanagement/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-securitycentermanagement/noxfile.py +++ b/packages/google-cloud-securitycentermanagement/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json b/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json index 8e50d8fef945..3354006bec29 100644 --- a/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json +++ b/packages/google-cloud-securitycentermanagement/samples/generated_samples/snippet_metadata_google.cloud.securitycentermanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-securitycentermanagement", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { diff --git a/packages/google-cloud-servicehealth/.OwlBot.yaml b/packages/google-cloud-servicehealth/.OwlBot.yaml new file mode 100644 index 000000000000..b5ae65bc61cc --- /dev/null +++ b/packages/google-cloud-servicehealth/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/servicehealth/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-servicehealth/$1 +api-name: google-cloud-servicehealth diff --git a/packages/google-cloud-servicehealth/.coveragerc b/packages/google-cloud-servicehealth/.coveragerc new file mode 100644 index 000000000000..d463e011be6d --- /dev/null +++ b/packages/google-cloud-servicehealth/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/servicehealth/__init__.py + google/cloud/servicehealth/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-servicehealth/.flake8 b/packages/google-cloud-servicehealth/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-servicehealth/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-servicehealth/.gitignore b/packages/google-cloud-servicehealth/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-servicehealth/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-servicehealth/.repo-metadata.json b/packages/google-cloud-servicehealth/.repo-metadata.json new file mode 100644 index 000000000000..13b275e0b281 --- /dev/null +++ b/packages/google-cloud-servicehealth/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-servicehealth", + "name_pretty": "Service Health API", + "api_description": "Personalized Service Health helps you gain visibility into disruptive events impacting Google Cloud products.", + "product_documentation": "https://cloud.google.com/service-health/docs/overview", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-servicehealth/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1466723&template=1161103", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-servicehealth", + "api_id": "servicehealth.googleapis.com", + 
"default_version": "v1", + "codeowner_team": "", + "api_shortname": "servicehealth" +} diff --git a/packages/google-cloud-servicehealth/CHANGELOG.md b/packages/google-cloud-servicehealth/CHANGELOG.md new file mode 100644 index 000000000000..08bfa165384b --- /dev/null +++ b/packages/google-cloud-servicehealth/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-01-12) + + +### Features + +* add initial files for google.cloud.servicehealth.v1 ([#12189](https://github.com/googleapis/google-cloud-python/issues/12189)) ([6ca7fa2](https://github.com/googleapis/google-cloud-python/commit/6ca7fa209b79f57fce901e049bf2251b2b41e9c1)) + +## Changelog diff --git a/packages/google-cloud-servicehealth/CODE_OF_CONDUCT.md b/packages/google-cloud-servicehealth/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-servicehealth/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-servicehealth/CONTRIBUTING.rst b/packages/google-cloud-servicehealth/CONTRIBUTING.rst new file mode 100644 index 000000000000..ca863340eaa5 --- /dev/null +++ b/packages/google-cloud-servicehealth/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors.
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-servicehealth + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-servicehealth/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-cloud-servicehealth/LICENSE b/packages/google-cloud-servicehealth/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-servicehealth/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-servicehealth/MANIFEST.in b/packages/google-cloud-servicehealth/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-servicehealth/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-servicehealth/README.rst b/packages/google-cloud-servicehealth/README.rst new file mode 100644 index 000000000000..16e32f6d7c55 --- /dev/null +++ b/packages/google-cloud-servicehealth/README.rst @@ -0,0 +1,108 @@ +Python Client for Service Health API +==================================== + +|preview| |pypi| |versions| + +`Service Health API`_: Personalized Service Health helps you gain visibility into disruptive events impacting Google Cloud products. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-servicehealth.svg + :target: https://pypi.org/project/google-cloud-servicehealth/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-servicehealth.svg + :target: https://pypi.org/project/google-cloud-servicehealth/ +.. _Service Health API: https://cloud.google.com/service-health/docs/overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-servicehealth/latest +.. _Product Documentation: https://cloud.google.com/service-health/docs/overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Service Health API.`_ +4. `Setup Authentication.`_ + +.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Service Health API.: https://cloud.google.com/service-health/docs/overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-servicehealth + + +Windows +^^^^^^^ + +..
code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-servicehealth + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Service Health API + to see other available methods on the client. +- Read the `Service Health API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Service Health API Product documentation: https://cloud.google.com/service-health/docs/overview +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-servicehealth/docs/CHANGELOG.md b/packages/google-cloud-servicehealth/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-servicehealth/docs/README.rst b/packages/google-cloud-servicehealth/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-servicehealth/docs/_static/custom.css b/packages/google-cloud-servicehealth/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-servicehealth/docs/_templates/layout.html
b/packages/google-cloud-servicehealth/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-servicehealth/docs/conf.py b/packages/google-cloud-servicehealth/docs/conf.py new file mode 100644 index 000000000000..daea0ca749a0 --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-servicehealth documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-servicehealth" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-servicehealth", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-servicehealth-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-servicehealth.tex", + "google-cloud-servicehealth Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-servicehealth", + "google-cloud-servicehealth Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-servicehealth", + "google-cloud-servicehealth Documentation", + author, + "google-cloud-servicehealth", + "google-cloud-servicehealth Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-servicehealth/docs/index.rst b/packages/google-cloud-servicehealth/docs/index.rst new file mode 100644 index 000000000000..2bbe39c941b2 --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + servicehealth_v1/services_ + servicehealth_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-servicehealth`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-servicehealth/docs/multiprocessing.rst b/packages/google-cloud-servicehealth/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-servicehealth/docs/servicehealth_v1/service_health.rst b/packages/google-cloud-servicehealth/docs/servicehealth_v1/service_health.rst new file mode 100644 index 000000000000..a0f48db46746 --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/servicehealth_v1/service_health.rst @@ -0,0 +1,10 @@ +ServiceHealth +------------------------------- + +.. automodule:: google.cloud.servicehealth_v1.services.service_health + :members: + :inherited-members: + +.. automodule:: google.cloud.servicehealth_v1.services.service_health.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-servicehealth/docs/servicehealth_v1/services_.rst b/packages/google-cloud-servicehealth/docs/servicehealth_v1/services_.rst new file mode 100644 index 000000000000..12c412fd7e7f --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/servicehealth_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Servicehealth v1 API +============================================== +.. 
toctree:: + :maxdepth: 2 + + service_health diff --git a/packages/google-cloud-servicehealth/docs/servicehealth_v1/types_.rst b/packages/google-cloud-servicehealth/docs/servicehealth_v1/types_.rst new file mode 100644 index 000000000000..ff89943738f2 --- /dev/null +++ b/packages/google-cloud-servicehealth/docs/servicehealth_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Servicehealth v1 API +=========================================== + +.. automodule:: google.cloud.servicehealth_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth/__init__.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth/__init__.py new file mode 100644 index 000000000000..1ce737667b78 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.servicehealth import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.servicehealth_v1.services.service_health.async_client import ( + ServiceHealthAsyncClient, +) +from google.cloud.servicehealth_v1.services.service_health.client import ( + ServiceHealthClient, +) +from google.cloud.servicehealth_v1.types.event_resources import ( + Asset, + Event, + EventImpact, + EventUpdate, + EventView, + GetEventRequest, + GetOrganizationEventRequest, + GetOrganizationImpactRequest, + ListEventsRequest, + ListEventsResponse, + ListOrganizationEventsRequest, + ListOrganizationEventsResponse, + ListOrganizationImpactsRequest, + ListOrganizationImpactsResponse, + Location, + OrganizationEvent, + OrganizationEventView, + OrganizationImpact, + Product, +) + +__all__ = ( + "ServiceHealthClient", + "ServiceHealthAsyncClient", + "Asset", + "Event", + "EventImpact", + "EventUpdate", + "GetEventRequest", + "GetOrganizationEventRequest", + "GetOrganizationImpactRequest", + "ListEventsRequest", + "ListEventsResponse", + "ListOrganizationEventsRequest", + "ListOrganizationEventsResponse", + "ListOrganizationImpactsRequest", + "ListOrganizationImpactsResponse", + "Location", + "OrganizationEvent", + "OrganizationImpact", + "Product", + "EventView", + "OrganizationEventView", +) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py new file mode 100644 index 000000000000..a7d39deb7a45 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth/py.typed b/packages/google-cloud-servicehealth/google/cloud/servicehealth/py.typed new file mode 100644 index 000000000000..e1cbbeac17a3 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-servicehealth package uses inline types. diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/__init__.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/__init__.py new file mode 100644 index 000000000000..a23bea06a89e --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/__init__.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.servicehealth_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.service_health import ServiceHealthAsyncClient, ServiceHealthClient +from .types.event_resources import ( + Asset, + Event, + EventImpact, + EventUpdate, + EventView, + GetEventRequest, + GetOrganizationEventRequest, + GetOrganizationImpactRequest, + ListEventsRequest, + ListEventsResponse, + ListOrganizationEventsRequest, + ListOrganizationEventsResponse, + ListOrganizationImpactsRequest, + ListOrganizationImpactsResponse, + Location, + OrganizationEvent, + OrganizationEventView, + OrganizationImpact, + Product, +) + +__all__ = ( + "ServiceHealthAsyncClient", + "Asset", + "Event", + "EventImpact", + "EventUpdate", + "EventView", + "GetEventRequest", + "GetOrganizationEventRequest", + "GetOrganizationImpactRequest", + "ListEventsRequest", + "ListEventsResponse", + "ListOrganizationEventsRequest", + "ListOrganizationEventsResponse", + "ListOrganizationImpactsRequest", + "ListOrganizationImpactsResponse", + "Location", + "OrganizationEvent", + "OrganizationEventView", + "OrganizationImpact", + "Product", + "ServiceHealthClient", +) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_metadata.json b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_metadata.json new file mode 100644 index 000000000000..8f09d1be9eac --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_metadata.json @@ -0,0 +1,118 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.servicehealth_v1", + "protoPackage": "google.cloud.servicehealth.v1", + "schema": "1.0", + "services": { + "ServiceHealth": { + "clients": { + "grpc": { + "libraryClient": "ServiceHealthClient", + "rpcs": { + "GetEvent": { + "methods": [ + "get_event" + ] + }, + "GetOrganizationEvent": 
{ + "methods": [ + "get_organization_event" + ] + }, + "GetOrganizationImpact": { + "methods": [ + "get_organization_impact" + ] + }, + "ListEvents": { + "methods": [ + "list_events" + ] + }, + "ListOrganizationEvents": { + "methods": [ + "list_organization_events" + ] + }, + "ListOrganizationImpacts": { + "methods": [ + "list_organization_impacts" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServiceHealthAsyncClient", + "rpcs": { + "GetEvent": { + "methods": [ + "get_event" + ] + }, + "GetOrganizationEvent": { + "methods": [ + "get_organization_event" + ] + }, + "GetOrganizationImpact": { + "methods": [ + "get_organization_impact" + ] + }, + "ListEvents": { + "methods": [ + "list_events" + ] + }, + "ListOrganizationEvents": { + "methods": [ + "list_organization_events" + ] + }, + "ListOrganizationImpacts": { + "methods": [ + "list_organization_impacts" + ] + } + } + }, + "rest": { + "libraryClient": "ServiceHealthClient", + "rpcs": { + "GetEvent": { + "methods": [ + "get_event" + ] + }, + "GetOrganizationEvent": { + "methods": [ + "get_organization_event" + ] + }, + "GetOrganizationImpact": { + "methods": [ + "get_organization_impact" + ] + }, + "ListEvents": { + "methods": [ + "list_events" + ] + }, + "ListOrganizationEvents": { + "methods": [ + "list_organization_events" + ] + }, + "ListOrganizationImpacts": { + "methods": [ + "list_organization_impacts" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py new file mode 100644 index 000000000000..a7d39deb7a45 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/py.typed b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/py.typed new file mode 100644 index 000000000000..e1cbbeac17a3 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-servicehealth package uses inline types. diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/__init__.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/__init__.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/__init__.py new file mode 100644 index 000000000000..6c2671aa3cc3 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ServiceHealthAsyncClient +from .client import ServiceHealthClient + +__all__ = ( + "ServiceHealthClient", + "ServiceHealthAsyncClient", +) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py new file mode 100644 index 000000000000..b51bf0e61c3f --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/async_client.py @@ -0,0 +1,1109 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.servicehealth_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.servicehealth_v1.services.service_health import pagers +from google.cloud.servicehealth_v1.types import event_resources + +from .client import ServiceHealthClient +from .transports.base import DEFAULT_CLIENT_INFO, ServiceHealthTransport +from .transports.grpc_asyncio import ServiceHealthGrpcAsyncIOTransport + + +class ServiceHealthAsyncClient: + """Request service health events relevant to your Google Cloud + project. 
+ """ + + _client: ServiceHealthClient + + DEFAULT_ENDPOINT = ServiceHealthClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServiceHealthClient.DEFAULT_MTLS_ENDPOINT + + event_path = staticmethod(ServiceHealthClient.event_path) + parse_event_path = staticmethod(ServiceHealthClient.parse_event_path) + organization_event_path = staticmethod(ServiceHealthClient.organization_event_path) + parse_organization_event_path = staticmethod( + ServiceHealthClient.parse_organization_event_path + ) + organization_impact_path = staticmethod( + ServiceHealthClient.organization_impact_path + ) + parse_organization_impact_path = staticmethod( + ServiceHealthClient.parse_organization_impact_path + ) + common_billing_account_path = staticmethod( + ServiceHealthClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ServiceHealthClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ServiceHealthClient.common_folder_path) + parse_common_folder_path = staticmethod( + ServiceHealthClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ServiceHealthClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ServiceHealthClient.parse_common_organization_path + ) + common_project_path = staticmethod(ServiceHealthClient.common_project_path) + parse_common_project_path = staticmethod( + ServiceHealthClient.parse_common_project_path + ) + common_location_path = staticmethod(ServiceHealthClient.common_location_path) + parse_common_location_path = staticmethod( + ServiceHealthClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + ServiceHealthAsyncClient: The constructed client. + """ + return ServiceHealthClient.from_service_account_info.__func__(ServiceHealthAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceHealthAsyncClient: The constructed client. + """ + return ServiceHealthClient.from_service_account_file.__func__(ServiceHealthAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ServiceHealthClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ServiceHealthTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceHealthTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ServiceHealthClient).get_transport_class, type(ServiceHealthClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ServiceHealthTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service health client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ServiceHealthTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ServiceHealthClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_events( + self, + request: Optional[Union[event_resources.ListEventsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEventsAsyncPager: + r"""Lists events under a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + async def sample_list_events(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_events(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.servicehealth_v1.types.ListEventsRequest, dict]]): + The request object. + parent (:class:`str`): + Required. Parent value using the form + ``projects/{project_id}/locations/{location}/events``. + + ``project_id`` - ID of the project for which to list + service health events. ``location`` - The location to + get the service health events from. To retrieve service + health events of category = INCIDENT, use ``location`` = + ``global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.services.service_health.pagers.ListEventsAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = event_resources.ListEventsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_events, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEventsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_event( + self, + request: Optional[Union[event_resources.GetEventRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.Event: + r"""Retrieves a resource containing information about an + event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + async def sample_get_event(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetEventRequest( + name="name_value", + ) + + # Make the request + response = await client.get_event(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicehealth_v1.types.GetEventRequest, dict]]): + The request object. Message for getting an event + name (:class:`str`): + Required. Unique name of the event in this scope + including project and location using the form + ``projects/{project_id}/locations/{location}/events/{event_id}``. + + ``project_id`` - Project ID of the project that contains + the event. ``location`` - The location to get the + service health events from. ``event_id`` - Event ID to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.types.Event: + Represents service health events that + may affect Google Cloud products. Event + resource is a read-only view and does + not allow any modifications. All fields + are output only. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = event_resources.GetEventRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_event, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_organization_events( + self, + request: Optional[ + Union[event_resources.ListOrganizationEventsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOrganizationEventsAsyncPager: + r"""Lists organization events under a given organization + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + async def sample_list_organization_events(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_events(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.servicehealth_v1.types.ListOrganizationEventsRequest, dict]]): + The request object. + parent (:class:`str`): + Required. Parent value using the form + ``organizations/{organization_id}/locations/{location}/organizationEvents``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``location`` - The location to get the service health + events from. To retrieve service health events of + category = INCIDENT, use ``location`` = ``global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationEventsAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = event_resources.ListOrganizationEventsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_organization_events, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOrganizationEventsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_organization_event( + self, + request: Optional[ + Union[event_resources.GetOrganizationEventRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.OrganizationEvent: + r"""Retrieves a resource containing information about an + event affecting an organization . + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + async def sample_get_organization_event(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationEventRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization_event(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicehealth_v1.types.GetOrganizationEventRequest, dict]]): + The request object. + name (:class:`str`): + Required. Unique name of the event in this scope + including organization and event ID using the form + ``organizations/{organization_id}/locations/locations/global/organizationEvents/{event_id}``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``event_id`` - Organization event ID to retrieve. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.types.OrganizationEvent: + Represents service health events that + may affect Google Cloud products used + across the organization. It is a + read-only view and does not allow any + modifications. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = event_resources.GetOrganizationEventRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_organization_event, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_organization_impacts( + self, + request: Optional[ + Union[event_resources.ListOrganizationImpactsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOrganizationImpactsAsyncPager: + r"""Lists assets impacted by organization events under a + given organization and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + async def sample_list_organization_impacts(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationImpactsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_impacts(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.servicehealth_v1.types.ListOrganizationImpactsRequest, dict]]): + The request object. Message for requesting list of + OrganizationImpacts + parent (:class:`str`): + Required. Parent value using the form + ``organizations/{organization_id}/locations/{location}/organizationImpacts``. + + ``organization_id`` - ID (number) of the project that + contains the event. 
To get your ``organization_id``, see + `Getting your organization resource + ID `__. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationImpactsAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = event_resources.ListOrganizationImpactsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_organization_impacts, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOrganizationImpactsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_organization_impact( + self, + request: Optional[ + Union[event_resources.GetOrganizationImpactRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.OrganizationImpact: + r"""Retrieves a resource containing information about + impact to an asset under an organization affected by a + service health event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + async def sample_get_organization_impact(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationImpactRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization_impact(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.servicehealth_v1.types.GetOrganizationImpactRequest, dict]]): + The request object. + name (:class:`str`): + Required. 
Name of the resource using the form + ``organizations/{organization_id}/locations/global/organizationImpacts/{organization_impact_id}``. + + ``organization_id`` - ID (number) of the organization + that contains the event. To get your + ``organization_id``, see `Getting your organization + resource + ID `__. + ``organization_impact_id`` - ID of the + `OrganizationImpact + resource `__. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.types.OrganizationImpact: + Represents impact to assets at + organizational level. It is a read-only + view and does not allow any + modifications. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = event_resources.GetOrganizationImpactRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_organization_impact, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ServiceHealthAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ServiceHealthAsyncClient",) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py new file mode 100644 index 000000000000..67869d49eae3 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/client.py @@ -0,0 +1,1325 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.servicehealth_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.servicehealth_v1.services.service_health import pagers +from google.cloud.servicehealth_v1.types import event_resources + +from .transports.base import DEFAULT_CLIENT_INFO, ServiceHealthTransport +from .transports.grpc import ServiceHealthGrpcTransport +from .transports.grpc_asyncio import ServiceHealthGrpcAsyncIOTransport +from .transports.rest import ServiceHealthRestTransport + + +class ServiceHealthClientMeta(type): + """Metaclass for the ServiceHealth client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ServiceHealthTransport]] + _transport_registry["grpc"] = ServiceHealthGrpcTransport + _transport_registry["grpc_asyncio"] = ServiceHealthGrpcAsyncIOTransport + _transport_registry["rest"] = ServiceHealthRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ServiceHealthTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ServiceHealthClient(metaclass=ServiceHealthClientMeta): + """Request service health events relevant to your Google Cloud + project. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "servicehealth.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceHealthClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceHealthClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceHealthTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceHealthTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def event_path( + project: str, + location: str, + event: str, + ) -> str: + """Returns a fully-qualified event string.""" + return "projects/{project}/locations/{location}/events/{event}".format( + project=project, + location=location, + event=event, + ) + + @staticmethod + def parse_event_path(path: str) -> Dict[str, str]: + """Parses a event path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/events/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def organization_event_path( + organization: str, + location: str, + event: str, + ) -> str: + """Returns a fully-qualified organization_event string.""" + return "organizations/{organization}/locations/{location}/organizationEvents/{event}".format( + organization=organization, + location=location, + event=event, + ) + + @staticmethod + def parse_organization_event_path(path: str) -> Dict[str, str]: + """Parses a organization_event path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/organizationEvents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def organization_impact_path( + organization: str, + location: str, + organization_impact: str, + ) -> str: + """Returns a fully-qualified organization_impact string.""" + return "organizations/{organization}/locations/{location}/organizationImpacts/{organization_impact}".format( + organization=organization, + location=location, + organization_impact=organization_impact, + ) + + @staticmethod + def parse_organization_impact_path(path: str) -> Dict[str, str]: + """Parses a organization_impact path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/organizationImpacts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a 
fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component 
segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceHealthTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service health client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ServiceHealthTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ServiceHealthTransport): + # transport is a ServiceHealthTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_events( + self, + request: Optional[Union[event_resources.ListEventsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEventsPager: + r"""Lists events under a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + def sample_list_events(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_events(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.servicehealth_v1.types.ListEventsRequest, dict]): + The request object. + parent (str): + Required. Parent value using the form + ``projects/{project_id}/locations/{location}/events``. + + ``project_id`` - ID of the project for which to list + service health events. ``location`` - The location to + get the service health events from. To retrieve service + health events of category = INCIDENT, use ``location`` = + ``global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.services.service_health.pagers.ListEventsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a event_resources.ListEventsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, event_resources.ListEventsRequest): + request = event_resources.ListEventsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_events] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEventsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_event( + self, + request: Optional[Union[event_resources.GetEventRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.Event: + r"""Retrieves a resource containing information about an + event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + def sample_get_event(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetEventRequest( + name="name_value", + ) + + # Make the request + response = client.get_event(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicehealth_v1.types.GetEventRequest, dict]): + The request object. Message for getting an event + name (str): + Required. Unique name of the event in this scope + including project and location using the form + ``projects/{project_id}/locations/{location}/events/{event_id}``. + + ``project_id`` - Project ID of the project that contains + the event. ``location`` - The location to get the + service health events from. ``event_id`` - Event ID to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.types.Event: + Represents service health events that + may affect Google Cloud products. Event + resource is a read-only view and does + not allow any modifications. All fields + are output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a event_resources.GetEventRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, event_resources.GetEventRequest): + request = event_resources.GetEventRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_event] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_organization_events( + self, + request: Optional[ + Union[event_resources.ListOrganizationEventsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOrganizationEventsPager: + r"""Lists organization events under a given organization + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + def sample_list_organization_events(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_events(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.servicehealth_v1.types.ListOrganizationEventsRequest, dict]): + The request object. + parent (str): + Required. Parent value using the form + ``organizations/{organization_id}/locations/{location}/organizationEvents``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``location`` - The location to get the service health + events from. To retrieve service health events of + category = INCIDENT, use ``location`` = ``global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationEventsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a event_resources.ListOrganizationEventsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, event_resources.ListOrganizationEventsRequest): + request = event_resources.ListOrganizationEventsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_organization_events] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOrganizationEventsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_organization_event( + self, + request: Optional[ + Union[event_resources.GetOrganizationEventRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.OrganizationEvent: + r"""Retrieves a resource containing information about an + event affecting an organization . + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + def sample_get_organization_event(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationEventRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization_event(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicehealth_v1.types.GetOrganizationEventRequest, dict]): + The request object. + name (str): + Required. Unique name of the event in this scope + including organization and event ID using the form + ``organizations/{organization_id}/locations/locations/global/organizationEvents/{event_id}``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``event_id`` - Organization event ID to retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.types.OrganizationEvent: + Represents service health events that + may affect Google Cloud products used + across the organization. It is a + read-only view and does not allow any + modifications. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a event_resources.GetOrganizationEventRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, event_resources.GetOrganizationEventRequest): + request = event_resources.GetOrganizationEventRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_organization_event] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_organization_impacts( + self, + request: Optional[ + Union[event_resources.ListOrganizationImpactsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOrganizationImpactsPager: + r"""Lists assets impacted by organization events under a + given organization and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + def sample_list_organization_impacts(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationImpactsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_impacts(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.servicehealth_v1.types.ListOrganizationImpactsRequest, dict]): + The request object. Message for requesting list of + OrganizationImpacts + parent (str): + Required. Parent value using the form + ``organizations/{organization_id}/locations/{location}/organizationImpacts``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationImpactsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a event_resources.ListOrganizationImpactsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, event_resources.ListOrganizationImpactsRequest): + request = event_resources.ListOrganizationImpactsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_organization_impacts + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOrganizationImpactsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_organization_impact( + self, + request: Optional[ + Union[event_resources.GetOrganizationImpactRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.OrganizationImpact: + r"""Retrieves a resource containing information about + impact to an asset under an organization affected by a + service health event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import servicehealth_v1 + + def sample_get_organization_impact(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationImpactRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization_impact(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.servicehealth_v1.types.GetOrganizationImpactRequest, dict]): + The request object. + name (str): + Required. Name of the resource using the form + ``organizations/{organization_id}/locations/global/organizationImpacts/{organization_impact_id}``. + + ``organization_id`` - ID (number) of the organization + that contains the event. To get your + ``organization_id``, see `Getting your organization + resource + ID `__. + ``organization_impact_id`` - ID of the + `OrganizationImpact + resource `__. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.servicehealth_v1.types.OrganizationImpact: + Represents impact to assets at + organizational level. It is a read-only + view and does not allow any + modifications. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a event_resources.GetOrganizationImpactRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, event_resources.GetOrganizationImpactRequest): + request = event_resources.GetOrganizationImpactRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_organization_impact] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "ServiceHealthClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ServiceHealthClient",) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/pagers.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/pagers.py new file mode 100644 index 000000000000..6138ca8c4562 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/pagers.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.servicehealth_v1.types import event_resources + + +class ListEventsPager: + """A pager for iterating through ``list_events`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicehealth_v1.types.ListEventsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``events`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEvents`` requests and continue to iterate + through the ``events`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicehealth_v1.types.ListEventsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., event_resources.ListEventsResponse], + request: event_resources.ListEventsRequest, + response: event_resources.ListEventsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicehealth_v1.types.ListEventsRequest): + The initial request object. + response (google.cloud.servicehealth_v1.types.ListEventsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = event_resources.ListEventsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[event_resources.ListEventsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[event_resources.Event]: + for page in self.pages: + yield from page.events + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEventsAsyncPager: + """A pager for iterating through ``list_events`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicehealth_v1.types.ListEventsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``events`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListEvents`` requests and continue to iterate + through the ``events`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicehealth_v1.types.ListEventsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[event_resources.ListEventsResponse]], + request: event_resources.ListEventsRequest, + response: event_resources.ListEventsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicehealth_v1.types.ListEventsRequest): + The initial request object. + response (google.cloud.servicehealth_v1.types.ListEventsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = event_resources.ListEventsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[event_resources.ListEventsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[event_resources.Event]: + async def async_generator(): + async for page in self.pages: + for response in page.events: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOrganizationEventsPager: + """A pager for iterating through ``list_organization_events`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicehealth_v1.types.ListOrganizationEventsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``organization_events`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOrganizationEvents`` requests and continue to iterate + through the ``organization_events`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicehealth_v1.types.ListOrganizationEventsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., event_resources.ListOrganizationEventsResponse], + request: event_resources.ListOrganizationEventsRequest, + response: event_resources.ListOrganizationEventsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicehealth_v1.types.ListOrganizationEventsRequest): + The initial request object. + response (google.cloud.servicehealth_v1.types.ListOrganizationEventsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = event_resources.ListOrganizationEventsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[event_resources.ListOrganizationEventsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[event_resources.OrganizationEvent]: + for page in self.pages: + yield from page.organization_events + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOrganizationEventsAsyncPager: + """A pager for iterating through ``list_organization_events`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicehealth_v1.types.ListOrganizationEventsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``organization_events`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOrganizationEvents`` requests and continue to iterate + through the ``organization_events`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicehealth_v1.types.ListOrganizationEventsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[event_resources.ListOrganizationEventsResponse] + ], + request: event_resources.ListOrganizationEventsRequest, + response: event_resources.ListOrganizationEventsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicehealth_v1.types.ListOrganizationEventsRequest): + The initial request object. + response (google.cloud.servicehealth_v1.types.ListOrganizationEventsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = event_resources.ListOrganizationEventsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[event_resources.ListOrganizationEventsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[event_resources.OrganizationEvent]: + async def async_generator(): + async for page in self.pages: + for response in page.organization_events: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOrganizationImpactsPager: + """A pager for iterating through ``list_organization_impacts`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.servicehealth_v1.types.ListOrganizationImpactsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``organization_impacts`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOrganizationImpacts`` requests and continue to iterate + through the ``organization_impacts`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicehealth_v1.types.ListOrganizationImpactsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., event_resources.ListOrganizationImpactsResponse], + request: event_resources.ListOrganizationImpactsRequest, + response: event_resources.ListOrganizationImpactsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicehealth_v1.types.ListOrganizationImpactsRequest): + The initial request object. + response (google.cloud.servicehealth_v1.types.ListOrganizationImpactsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = event_resources.ListOrganizationImpactsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[event_resources.ListOrganizationImpactsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[event_resources.OrganizationImpact]: + for page in self.pages: + yield from page.organization_impacts + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOrganizationImpactsAsyncPager: + """A pager for iterating through ``list_organization_impacts`` requests. + + This class thinly wraps an initial + :class:`google.cloud.servicehealth_v1.types.ListOrganizationImpactsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``organization_impacts`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOrganizationImpacts`` requests and continue to iterate + through the ``organization_impacts`` field on the + corresponding responses. + + All the usual :class:`google.cloud.servicehealth_v1.types.ListOrganizationImpactsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[event_resources.ListOrganizationImpactsResponse] + ], + request: event_resources.ListOrganizationImpactsRequest, + response: event_resources.ListOrganizationImpactsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.servicehealth_v1.types.ListOrganizationImpactsRequest): + The initial request object. + response (google.cloud.servicehealth_v1.types.ListOrganizationImpactsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = event_resources.ListOrganizationImpactsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[event_resources.ListOrganizationImpactsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[event_resources.OrganizationImpact]: + async def async_generator(): + async for page in self.pages: + for response in page.organization_impacts: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/__init__.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/__init__.py new file mode 100644 index 000000000000..58c1b86bc6f2 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ServiceHealthTransport +from .grpc import ServiceHealthGrpcTransport +from .grpc_asyncio import ServiceHealthGrpcAsyncIOTransport +from .rest import ServiceHealthRestInterceptor, ServiceHealthRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceHealthTransport]] +_transport_registry["grpc"] = ServiceHealthGrpcTransport +_transport_registry["grpc_asyncio"] = ServiceHealthGrpcAsyncIOTransport +_transport_registry["rest"] = ServiceHealthRestTransport + +__all__ = ( + "ServiceHealthTransport", + "ServiceHealthGrpcTransport", + "ServiceHealthGrpcAsyncIOTransport", + "ServiceHealthRestTransport", + "ServiceHealthRestInterceptor", +) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/base.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/base.py new file mode 100644 index 000000000000..5cb7de5221d5 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/base.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.servicehealth_v1 import gapic_version as package_version +from google.cloud.servicehealth_v1.types import event_resources + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ServiceHealthTransport(abc.ABC): + """Abstract transport class for ServiceHealth.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "servicehealth.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_events: gapic_v1.method.wrap_method( + self.list_events, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_event: gapic_v1.method.wrap_method( + self.get_event, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_organization_events: gapic_v1.method.wrap_method( + self.list_organization_events, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_organization_event: gapic_v1.method.wrap_method( + self.get_organization_event, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_organization_impacts: gapic_v1.method.wrap_method( + self.list_organization_impacts, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_organization_impact: gapic_v1.method.wrap_method( + self.get_organization_impact, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_events( + self, + ) -> Callable[ + [event_resources.ListEventsRequest], + Union[ + event_resources.ListEventsResponse, + Awaitable[event_resources.ListEventsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_event( + self, + ) -> Callable[ + [event_resources.GetEventRequest], + Union[event_resources.Event, Awaitable[event_resources.Event]], + ]: + raise NotImplementedError() + + @property + def list_organization_events( + self, + ) -> Callable[ + [event_resources.ListOrganizationEventsRequest], + Union[ + event_resources.ListOrganizationEventsResponse, + Awaitable[event_resources.ListOrganizationEventsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_organization_event( + self, + ) -> Callable[ + [event_resources.GetOrganizationEventRequest], + Union[ + event_resources.OrganizationEvent, + Awaitable[event_resources.OrganizationEvent], + ], + ]: + raise NotImplementedError() + + @property + def list_organization_impacts( + self, + ) -> Callable[ + [event_resources.ListOrganizationImpactsRequest], + Union[ + event_resources.ListOrganizationImpactsResponse, + Awaitable[event_resources.ListOrganizationImpactsResponse], + ], + ]: + raise NotImplementedError() + + 
@property + def get_organization_impact( + self, + ) -> Callable[ + [event_resources.GetOrganizationImpactRequest], + Union[ + event_resources.OrganizationImpact, + Awaitable[event_resources.OrganizationImpact], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ServiceHealthTransport",) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/grpc.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/grpc.py new file mode 100644 index 000000000000..b6c30d7d5dff --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/grpc.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.servicehealth_v1.types import event_resources + +from .base import DEFAULT_CLIENT_INFO, ServiceHealthTransport + + +class ServiceHealthGrpcTransport(ServiceHealthTransport): + """gRPC backend transport for ServiceHealth. + + Request service health events relevant to your Google Cloud + project. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "servicehealth.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "servicehealth.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_events( + self, + ) -> Callable[ + [event_resources.ListEventsRequest], event_resources.ListEventsResponse + ]: + r"""Return a callable for the list events method over gRPC. + + Lists events under a given project and location. + + Returns: + Callable[[~.ListEventsRequest], + ~.ListEventsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_events" not in self._stubs: + self._stubs["list_events"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/ListEvents", + request_serializer=event_resources.ListEventsRequest.serialize, + response_deserializer=event_resources.ListEventsResponse.deserialize, + ) + return self._stubs["list_events"] + + @property + def get_event( + self, + ) -> Callable[[event_resources.GetEventRequest], event_resources.Event]: + r"""Return a callable for the get event method over gRPC. + + Retrieves a resource containing information about an + event. + + Returns: + Callable[[~.GetEventRequest], + ~.Event]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_event" not in self._stubs: + self._stubs["get_event"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/GetEvent", + request_serializer=event_resources.GetEventRequest.serialize, + response_deserializer=event_resources.Event.deserialize, + ) + return self._stubs["get_event"] + + @property + def list_organization_events( + self, + ) -> Callable[ + [event_resources.ListOrganizationEventsRequest], + event_resources.ListOrganizationEventsResponse, + ]: + r"""Return a callable for the list organization events method over gRPC. + + Lists organization events under a given organization + and location. + + Returns: + Callable[[~.ListOrganizationEventsRequest], + ~.ListOrganizationEventsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_organization_events" not in self._stubs: + self._stubs["list_organization_events"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/ListOrganizationEvents", + request_serializer=event_resources.ListOrganizationEventsRequest.serialize, + response_deserializer=event_resources.ListOrganizationEventsResponse.deserialize, + ) + return self._stubs["list_organization_events"] + + @property + def get_organization_event( + self, + ) -> Callable[ + [event_resources.GetOrganizationEventRequest], event_resources.OrganizationEvent + ]: + r"""Return a callable for the get organization event method over gRPC. + + Retrieves a resource containing information about an + event affecting an organization . + + Returns: + Callable[[~.GetOrganizationEventRequest], + ~.OrganizationEvent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization_event" not in self._stubs: + self._stubs["get_organization_event"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/GetOrganizationEvent", + request_serializer=event_resources.GetOrganizationEventRequest.serialize, + response_deserializer=event_resources.OrganizationEvent.deserialize, + ) + return self._stubs["get_organization_event"] + + @property + def list_organization_impacts( + self, + ) -> Callable[ + [event_resources.ListOrganizationImpactsRequest], + event_resources.ListOrganizationImpactsResponse, + ]: + r"""Return a callable for the list organization impacts method over gRPC. + + Lists assets impacted by organization events under a + given organization and location. 
+ + Returns: + Callable[[~.ListOrganizationImpactsRequest], + ~.ListOrganizationImpactsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_organization_impacts" not in self._stubs: + self._stubs["list_organization_impacts"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/ListOrganizationImpacts", + request_serializer=event_resources.ListOrganizationImpactsRequest.serialize, + response_deserializer=event_resources.ListOrganizationImpactsResponse.deserialize, + ) + return self._stubs["list_organization_impacts"] + + @property + def get_organization_impact( + self, + ) -> Callable[ + [event_resources.GetOrganizationImpactRequest], + event_resources.OrganizationImpact, + ]: + r"""Return a callable for the get organization impact method over gRPC. + + Retrieves a resource containing information about + impact to an asset under an organization affected by a + service health event. + + Returns: + Callable[[~.GetOrganizationImpactRequest], + ~.OrganizationImpact]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_organization_impact" not in self._stubs: + self._stubs["get_organization_impact"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/GetOrganizationImpact", + request_serializer=event_resources.GetOrganizationImpactRequest.serialize, + response_deserializer=event_resources.OrganizationImpact.deserialize, + ) + return self._stubs["get_organization_impact"] + + def close(self): + self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ServiceHealthGrpcTransport",) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/grpc_asyncio.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/grpc_asyncio.py new file mode 100644 index 000000000000..efdd58165b64 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/grpc_asyncio.py @@ -0,0 +1,453 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.servicehealth_v1.types import event_resources + +from .base import DEFAULT_CLIENT_INFO, ServiceHealthTransport +from .grpc import ServiceHealthGrpcTransport + + +class ServiceHealthGrpcAsyncIOTransport(ServiceHealthTransport): + """gRPC AsyncIO backend transport for ServiceHealth. + + Request service health events relevant to your Google Cloud + project. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "servicehealth.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "servicehealth.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_events( + self, + ) -> Callable[ + [event_resources.ListEventsRequest], + Awaitable[event_resources.ListEventsResponse], + ]: + r"""Return a callable for the list events method over gRPC. + + Lists events under a given project and location. + + Returns: + Callable[[~.ListEventsRequest], + Awaitable[~.ListEventsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_events" not in self._stubs: + self._stubs["list_events"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/ListEvents", + request_serializer=event_resources.ListEventsRequest.serialize, + response_deserializer=event_resources.ListEventsResponse.deserialize, + ) + return self._stubs["list_events"] + + @property + def get_event( + self, + ) -> Callable[[event_resources.GetEventRequest], Awaitable[event_resources.Event]]: + r"""Return a callable for the get event method over gRPC. + + Retrieves a resource containing information about an + event. + + Returns: + Callable[[~.GetEventRequest], + Awaitable[~.Event]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_event" not in self._stubs: + self._stubs["get_event"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/GetEvent", + request_serializer=event_resources.GetEventRequest.serialize, + response_deserializer=event_resources.Event.deserialize, + ) + return self._stubs["get_event"] + + @property + def list_organization_events( + self, + ) -> Callable[ + [event_resources.ListOrganizationEventsRequest], + Awaitable[event_resources.ListOrganizationEventsResponse], + ]: + r"""Return a callable for the list organization events method over gRPC. + + Lists organization events under a given organization + and location. + + Returns: + Callable[[~.ListOrganizationEventsRequest], + Awaitable[~.ListOrganizationEventsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_organization_events" not in self._stubs: + self._stubs["list_organization_events"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/ListOrganizationEvents", + request_serializer=event_resources.ListOrganizationEventsRequest.serialize, + response_deserializer=event_resources.ListOrganizationEventsResponse.deserialize, + ) + return self._stubs["list_organization_events"] + + @property + def get_organization_event( + self, + ) -> Callable[ + [event_resources.GetOrganizationEventRequest], + Awaitable[event_resources.OrganizationEvent], + ]: + r"""Return a callable for the get organization event method over gRPC. + + Retrieves a resource containing information about an + event affecting an organization . 
+ + Returns: + Callable[[~.GetOrganizationEventRequest], + Awaitable[~.OrganizationEvent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization_event" not in self._stubs: + self._stubs["get_organization_event"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/GetOrganizationEvent", + request_serializer=event_resources.GetOrganizationEventRequest.serialize, + response_deserializer=event_resources.OrganizationEvent.deserialize, + ) + return self._stubs["get_organization_event"] + + @property + def list_organization_impacts( + self, + ) -> Callable[ + [event_resources.ListOrganizationImpactsRequest], + Awaitable[event_resources.ListOrganizationImpactsResponse], + ]: + r"""Return a callable for the list organization impacts method over gRPC. + + Lists assets impacted by organization events under a + given organization and location. + + Returns: + Callable[[~.ListOrganizationImpactsRequest], + Awaitable[~.ListOrganizationImpactsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_organization_impacts" not in self._stubs: + self._stubs["list_organization_impacts"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/ListOrganizationImpacts", + request_serializer=event_resources.ListOrganizationImpactsRequest.serialize, + response_deserializer=event_resources.ListOrganizationImpactsResponse.deserialize, + ) + return self._stubs["list_organization_impacts"] + + @property + def get_organization_impact( + self, + ) -> Callable[ + [event_resources.GetOrganizationImpactRequest], + Awaitable[event_resources.OrganizationImpact], + ]: + r"""Return a callable for the get organization impact method over gRPC. + + Retrieves a resource containing information about + impact to an asset under an organization affected by a + service health event. + + Returns: + Callable[[~.GetOrganizationImpactRequest], + Awaitable[~.OrganizationImpact]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization_impact" not in self._stubs: + self._stubs["get_organization_impact"] = self.grpc_channel.unary_unary( + "/google.cloud.servicehealth.v1.ServiceHealth/GetOrganizationImpact", + request_serializer=event_resources.GetOrganizationImpactRequest.serialize, + response_deserializer=event_resources.OrganizationImpact.deserialize, + ) + return self._stubs["get_organization_impact"] + + def close(self): + return self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ServiceHealthGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/rest.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/rest.py new file mode 100644 index 000000000000..7178eb40d544 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/services/service_health/transports/rest.py @@ -0,0 +1,1145 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.servicehealth_v1.types import event_resources + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ServiceHealthTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ServiceHealthRestInterceptor: + """Interceptor for ServiceHealth. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServiceHealthRestTransport. + + .. code-block:: python + class MyCustomServiceHealthInterceptor(ServiceHealthRestInterceptor): + def pre_get_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_event(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_organization_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_organization_event(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_organization_impact(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_organization_impact(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_events(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_organization_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_organization_events(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_organization_impacts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_organization_impacts(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServiceHealthRestTransport(interceptor=MyCustomServiceHealthInterceptor()) + client = 
ServiceHealthClient(transport=transport) + + + """ + + def pre_get_event( + self, + request: event_resources.GetEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[event_resources.GetEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. + """ + return request, metadata + + def post_get_event(self, response: event_resources.Event) -> event_resources.Event: + """Post-rpc interceptor for get_event + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + def pre_get_organization_event( + self, + request: event_resources.GetOrganizationEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[event_resources.GetOrganizationEventRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_organization_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. + """ + return request, metadata + + def post_get_organization_event( + self, response: event_resources.OrganizationEvent + ) -> event_resources.OrganizationEvent: + """Post-rpc interceptor for get_organization_event + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + def pre_get_organization_impact( + self, + request: event_resources.GetOrganizationImpactRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[event_resources.GetOrganizationImpactRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_organization_impact + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. 
+ """ + return request, metadata + + def post_get_organization_impact( + self, response: event_resources.OrganizationImpact + ) -> event_resources.OrganizationImpact: + """Post-rpc interceptor for get_organization_impact + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + def pre_list_events( + self, + request: event_resources.ListEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[event_resources.ListEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. + """ + return request, metadata + + def post_list_events( + self, response: event_resources.ListEventsResponse + ) -> event_resources.ListEventsResponse: + """Post-rpc interceptor for list_events + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + def pre_list_organization_events( + self, + request: event_resources.ListOrganizationEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + event_resources.ListOrganizationEventsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_organization_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. + """ + return request, metadata + + def post_list_organization_events( + self, response: event_resources.ListOrganizationEventsResponse + ) -> event_resources.ListOrganizationEventsResponse: + """Post-rpc interceptor for list_organization_events + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. 
+ """ + return response + + def pre_list_organization_impacts( + self, + request: event_resources.ListOrganizationImpactsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + event_resources.ListOrganizationImpactsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_organization_impacts + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. + """ + return request, metadata + + def post_list_organization_impacts( + self, response: event_resources.ListOrganizationImpactsResponse + ) -> event_resources.ListOrganizationImpactsResponse: + """Post-rpc interceptor for list_organization_impacts + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceHealth server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the ServiceHealth server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ServiceHealthRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServiceHealthRestInterceptor + + +class ServiceHealthRestTransport(ServiceHealthTransport): + """REST backend transport for ServiceHealth. + + Request service health events relevant to your Google Cloud + project. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "servicehealth.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ServiceHealthRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ServiceHealthRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetEvent(ServiceHealthRestStub): + def __hash__(self): + return hash("GetEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: event_resources.GetEventRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.Event: + r"""Call the get event method over HTTP. + + Args: + request (~.event_resources.GetEventRequest): + The request object. Message for getting an event + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.event_resources.Event: + Represents service health events that + may affect Google Cloud products. 
Event + resource is a read-only view and does + not allow any modifications. All fields + are output only. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/events/*}", + }, + ] + request, metadata = self._interceptor.pre_get_event(request, metadata) + pb_request = event_resources.GetEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_resources.Event() + pb_resp = event_resources.Event.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_event(resp) + return resp + + class _GetOrganizationEvent(ServiceHealthRestStub): + def __hash__(self): + return hash("GetOrganizationEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: event_resources.GetOrganizationEventRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.OrganizationEvent: + r"""Call the get organization event method over HTTP. + + Args: + request (~.event_resources.GetOrganizationEventRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.event_resources.OrganizationEvent: + Represents service health events that + may affect Google Cloud products used + across the organization. It is a + read-only view and does not allow any + modifications. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/organizationEvents/*}", + }, + ] + request, metadata = self._interceptor.pre_get_organization_event( + request, metadata + ) + pb_request = event_resources.GetOrganizationEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_resources.OrganizationEvent() + pb_resp = event_resources.OrganizationEvent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_organization_event(resp) + return resp + + class _GetOrganizationImpact(ServiceHealthRestStub): + def __hash__(self): + return hash("GetOrganizationImpact") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: event_resources.GetOrganizationImpactRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.OrganizationImpact: + r"""Call the get organization impact method over HTTP. + + Args: + request (~.event_resources.GetOrganizationImpactRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.event_resources.OrganizationImpact: + Represents impact to assets at + organizational level. It is a read-only + view and does not allow any + modifications. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/organizationImpacts/*}", + }, + ] + request, metadata = self._interceptor.pre_get_organization_impact( + request, metadata + ) + pb_request = event_resources.GetOrganizationImpactRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_resources.OrganizationImpact() + pb_resp = event_resources.OrganizationImpact.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_organization_impact(resp) + return resp + + class _ListEvents(ServiceHealthRestStub): + def __hash__(self): + return hash("ListEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: event_resources.ListEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.ListEventsResponse: + r"""Call the list events method over HTTP. + + Args: + request (~.event_resources.ListEventsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.event_resources.ListEventsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/events", + }, + ] + request, metadata = self._interceptor.pre_list_events(request, metadata) + pb_request = event_resources.ListEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_resources.ListEventsResponse() + pb_resp = event_resources.ListEventsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_events(resp) + return resp + + class _ListOrganizationEvents(ServiceHealthRestStub): + def __hash__(self): + return hash("ListOrganizationEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: event_resources.ListOrganizationEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.ListOrganizationEventsResponse: + r"""Call the list organization events method over HTTP. + + Args: + request (~.event_resources.ListOrganizationEventsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.event_resources.ListOrganizationEventsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/organizationEvents", + }, + ] + request, metadata = self._interceptor.pre_list_organization_events( + request, metadata + ) + pb_request = event_resources.ListOrganizationEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_resources.ListOrganizationEventsResponse() + pb_resp = event_resources.ListOrganizationEventsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_organization_events(resp) + return resp + + class _ListOrganizationImpacts(ServiceHealthRestStub): + def __hash__(self): + return hash("ListOrganizationImpacts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: event_resources.ListOrganizationImpactsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> event_resources.ListOrganizationImpactsResponse: + r"""Call the list organization impacts method over HTTP. + + Args: + request (~.event_resources.ListOrganizationImpactsRequest): + The request object. Message for requesting list of + OrganizationImpacts + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.event_resources.ListOrganizationImpactsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/organizationImpacts", + }, + ] + request, metadata = self._interceptor.pre_list_organization_impacts( + request, metadata + ) + pb_request = event_resources.ListOrganizationImpactsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = event_resources.ListOrganizationImpactsResponse() + pb_resp = event_resources.ListOrganizationImpactsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_organization_impacts(resp) + return resp + + @property + def get_event( + self, + ) -> Callable[[event_resources.GetEventRequest], event_resources.Event]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_organization_event( + self, + ) -> Callable[ + [event_resources.GetOrganizationEventRequest], event_resources.OrganizationEvent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOrganizationEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_organization_impact( + self, + ) -> Callable[ + [event_resources.GetOrganizationImpactRequest], + event_resources.OrganizationImpact, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOrganizationImpact(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_events( + self, + ) -> Callable[ + [event_resources.ListEventsRequest], event_resources.ListEventsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEvents(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_organization_events( + self, + ) -> Callable[ + [event_resources.ListOrganizationEventsRequest], + event_resources.ListOrganizationEventsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListOrganizationEvents(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_organization_impacts( + self, + ) -> Callable[ + [event_resources.ListOrganizationImpactsRequest], + event_resources.ListOrganizationImpactsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOrganizationImpacts(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ServiceHealthRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ServiceHealthRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ServiceHealthRestTransport",) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/__init__.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/__init__.py new file mode 100644 index 000000000000..ba07502ee58b --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/__init__.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .event_resources import ( + Asset, + Event, + EventImpact, + EventUpdate, + EventView, + GetEventRequest, + GetOrganizationEventRequest, + GetOrganizationImpactRequest, + ListEventsRequest, + ListEventsResponse, + ListOrganizationEventsRequest, + ListOrganizationEventsResponse, + ListOrganizationImpactsRequest, + ListOrganizationImpactsResponse, + Location, + OrganizationEvent, + OrganizationEventView, + OrganizationImpact, + Product, +) + +__all__ = ( + "Asset", + "Event", + "EventImpact", + "EventUpdate", + "GetEventRequest", + "GetOrganizationEventRequest", + "GetOrganizationImpactRequest", + "ListEventsRequest", + "ListEventsResponse", + "ListOrganizationEventsRequest", + "ListOrganizationEventsResponse", + "ListOrganizationImpactsRequest", + "ListOrganizationImpactsResponse", + "Location", + "OrganizationEvent", + "OrganizationImpact", + "Product", + "EventView", + "OrganizationEventView", +) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/event_resources.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/event_resources.py new file mode 100644 index 000000000000..87cbd68f7996 --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/event_resources.py @@ -0,0 +1,1126 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.servicehealth.v1", + manifest={ + "EventView", + "OrganizationEventView", + "Event", + "OrganizationEvent", + "EventUpdate", + "Location", + "Product", + "EventImpact", + "OrganizationImpact", + "Asset", + "ListEventsRequest", + "ListEventsResponse", + "GetEventRequest", + "ListOrganizationEventsRequest", + "ListOrganizationEventsResponse", + "GetOrganizationEventRequest", + "ListOrganizationImpactsRequest", + "ListOrganizationImpactsResponse", + "GetOrganizationImpactRequest", + }, +) + + +class EventView(proto.Enum): + r"""The event fields to include in ListEvents API response. This + enum lists all possible event views. + + Values: + EVENT_VIEW_UNSPECIFIED (0): + Unspecified event view. Default to ``EVENT_VIEW_BASIC``. + EVENT_VIEW_BASIC (1): + Includes all fields except ``updates``. This view is the + default for ListEvents API. + EVENT_VIEW_FULL (2): + Includes all event fields. + """ + EVENT_VIEW_UNSPECIFIED = 0 + EVENT_VIEW_BASIC = 1 + EVENT_VIEW_FULL = 2 + + +class OrganizationEventView(proto.Enum): + r"""The organization event fields to include in + ListOrganizationEvents API response. This enum lists all + possible organization event views. + + Values: + ORGANIZATION_EVENT_VIEW_UNSPECIFIED (0): + Unspecified event view. Default to + ``ORGANIZATION_EVENT_VIEW_BASIC``. + ORGANIZATION_EVENT_VIEW_BASIC (1): + Includes all organization event fields except ``updates``. + This view is the default for ListOrganizationEvents API. + ORGANIZATION_EVENT_VIEW_FULL (2): + Includes all organization event fields. 
+ """ + ORGANIZATION_EVENT_VIEW_UNSPECIFIED = 0 + ORGANIZATION_EVENT_VIEW_BASIC = 1 + ORGANIZATION_EVENT_VIEW_FULL = 2 + + +class Event(proto.Message): + r"""Represents service health events that may affect Google Cloud + products. Event resource is a read-only view and does not allow + any modifications. All fields are output only. + + Attributes: + name (str): + Output only. Identifier. Name of the event. Unique name of + the event in this scope including project and location using + the form + ``projects/{project_id}/locations/{location}/events/{event_id}``. + title (str): + Output only. Brief description for the event. + description (str): + Output only. Free-form, human-readable + description. + category (google.cloud.servicehealth_v1.types.Event.EventCategory): + Output only. The category of the event. + detailed_category (google.cloud.servicehealth_v1.types.Event.DetailedCategory): + Output only. The detailed category of the + event. + state (google.cloud.servicehealth_v1.types.Event.State): + Output only. The current state of the event. + detailed_state (google.cloud.servicehealth_v1.types.Event.DetailedState): + Output only. The current detailed state of + the incident. + event_impacts (MutableSequence[google.cloud.servicehealth_v1.types.EventImpact]): + Google Cloud products and locations impacted + by the event. + relevance (google.cloud.servicehealth_v1.types.Event.Relevance): + Output only. Communicates why a given event + is deemed relevant in the context of a given + project. + updates (MutableSequence[google.cloud.servicehealth_v1.types.EventUpdate]): + Output only. Event updates are correspondence + from Google. + parent_event (str): + Output only. When ``detailed_state``\ =\ ``MERGED``, + ``parent_event`` contains the name of the parent event. All + further updates will be published to the parent event. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the event was last + modified. 
+ start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of the event, if + applicable. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of the event, if + applicable. + next_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the next update + can be expected. + """ + + class EventCategory(proto.Enum): + r"""The category of the event. This enum lists all possible + categories of event. + + Values: + EVENT_CATEGORY_UNSPECIFIED (0): + Unspecified category. + INCIDENT (2): + Event category for service outage or + degradation. + """ + EVENT_CATEGORY_UNSPECIFIED = 0 + INCIDENT = 2 + + class DetailedCategory(proto.Enum): + r"""The detailed category of an event. Contains all possible + states for all event categories. + + Values: + DETAILED_CATEGORY_UNSPECIFIED (0): + Unspecified detailed category. + CONFIRMED_INCIDENT (1): + Indicates an event with category INCIDENT has + a confirmed impact to at least one Google Cloud + product. + EMERGING_INCIDENT (2): + Indicates an event with category INCIDENT is + under investigation to determine if it has a + confirmed impact on any Google Cloud products. + """ + DETAILED_CATEGORY_UNSPECIFIED = 0 + CONFIRMED_INCIDENT = 1 + EMERGING_INCIDENT = 2 + + class State(proto.Enum): + r"""The state of the event. This enum lists all possible states + of event. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + ACTIVE (1): + Event is actively affecting a Google Cloud + product and will continue to receive updates. + CLOSED (2): + Event is no longer affecting the Google Cloud + product or has been merged with another event. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLOSED = 2 + + class DetailedState(proto.Enum): + r"""The detailed state of the incident. This enum lists all + possible detailed states of an incident. + + Values: + DETAILED_STATE_UNSPECIFIED (0): + Unspecified detail state. 
+ EMERGING (1): + Google engineers are actively investigating + the event to determine the impact. + CONFIRMED (2): + The incident is confirmed and impacting at + least one Google Cloud product. Ongoing status + updates will be provided until it is resolved. + RESOLVED (3): + The incident is no longer affecting any + Google Cloud product, and there will be no + further updates. + MERGED (4): + The incident was merged into a parent incident. All further + updates will be published to the parent only. The + ``parent_event`` field contains the name of the parent. + AUTO_CLOSED (9): + The incident was automatically closed because + the issues couldn’t be confirmed or is no longer + impacting Google Cloud Products and/or + Locations. + FALSE_POSITIVE (10): + The incident was verified as non-impactful. + No further action required. + """ + DETAILED_STATE_UNSPECIFIED = 0 + EMERGING = 1 + CONFIRMED = 2 + RESOLVED = 3 + MERGED = 4 + AUTO_CLOSED = 9 + FALSE_POSITIVE = 10 + + class Relevance(proto.Enum): + r"""Communicates why a given incident is deemed relevant in the + context of a given project. This enum lists all possible + detailed states of relevance. + + Values: + RELEVANCE_UNSPECIFIED (0): + Unspecified relevance. + UNKNOWN (2): + The relevance of the incident to the project + is unknown. + NOT_IMPACTED (6): + The incident does not impact the project. + PARTIALLY_RELATED (7): + The incident is associated with a Google + Cloud product your project uses, but the + incident may not be impacting your project. For + example, the incident may be impacting a Google + Cloud product that your project uses, but in a + location that your project does not use. + RELATED (8): + The incident has a direct connection with + your project and impacts a Google Cloud product + in a location your project uses. + IMPACTED (9): + The incident is verified to be impacting your + project. 
+ """ + RELEVANCE_UNSPECIFIED = 0 + UNKNOWN = 2 + NOT_IMPACTED = 6 + PARTIALLY_RELATED = 7 + RELATED = 8 + IMPACTED = 9 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + title: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + category: EventCategory = proto.Field( + proto.ENUM, + number=4, + enum=EventCategory, + ) + detailed_category: DetailedCategory = proto.Field( + proto.ENUM, + number=21, + enum=DetailedCategory, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + detailed_state: DetailedState = proto.Field( + proto.ENUM, + number=19, + enum=DetailedState, + ) + event_impacts: MutableSequence["EventImpact"] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message="EventImpact", + ) + relevance: Relevance = proto.Field( + proto.ENUM, + number=8, + enum=Relevance, + ) + updates: MutableSequence["EventUpdate"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="EventUpdate", + ) + parent_event: str = proto.Field( + proto.STRING, + number=10, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + next_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp_pb2.Timestamp, + ) + + +class OrganizationEvent(proto.Message): + r"""Represents service health events that may affect Google Cloud + products used across the organization. It is a read-only view + and does not allow any modifications. + + Attributes: + name (str): + Output only. Identifier. Name of the event. 
Unique name of + the event in this scope including organization ID and + location using the form + ``organizations/{organization_id}/locations/{location}/organizationEvents/{event_id}``. + + ``organization_id`` - see `Getting your organization + resource + ID `__. + ``location`` - The location to get the service health events + from. ``event_id`` - Organization event ID to retrieve. + title (str): + Output only. Brief description for the event. + description (str): + Output only. Free-form, human-readable + description. + category (google.cloud.servicehealth_v1.types.OrganizationEvent.EventCategory): + Output only. The category of the event. + detailed_category (google.cloud.servicehealth_v1.types.OrganizationEvent.DetailedCategory): + Output only. The detailed category of the + event. + state (google.cloud.servicehealth_v1.types.OrganizationEvent.State): + Output only. The current state of the event. + detailed_state (google.cloud.servicehealth_v1.types.OrganizationEvent.DetailedState): + Output only. The current detailed state of + the incident. + event_impacts (MutableSequence[google.cloud.servicehealth_v1.types.EventImpact]): + Output only. Represents the Google Cloud + products and locations impacted by the event. + updates (MutableSequence[google.cloud.servicehealth_v1.types.EventUpdate]): + Output only. Incident-only field. Event + updates are correspondence from Google. + parent_event (str): + Output only. When ``detailed_state``\ =\ ``MERGED``, + ``parent_event`` contains the name of the parent event. All + further updates will be published to the parent event. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the update was posted. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of the event, if + applicable. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of the event, if + applicable. 
+ next_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Incident-only field. The time + when the next update can be expected. + """ + + class EventCategory(proto.Enum): + r"""The category of the event. This enum lists all possible + categories of event. + + Values: + EVENT_CATEGORY_UNSPECIFIED (0): + Unspecified category. + INCIDENT (2): + Event category for service outage or + degradation. + """ + EVENT_CATEGORY_UNSPECIFIED = 0 + INCIDENT = 2 + + class DetailedCategory(proto.Enum): + r"""The detailed category of an event. Contains all possible + states for all event categories. + + Values: + DETAILED_CATEGORY_UNSPECIFIED (0): + Unspecified detailed category. + CONFIRMED_INCIDENT (1): + Indicates an event with category INCIDENT has + a confirmed impact to at least one Google Cloud + product. + EMERGING_INCIDENT (2): + Indicates an event with category INCIDENT is + under investigation to determine if it has a + confirmed impact on any Google Cloud products. + """ + DETAILED_CATEGORY_UNSPECIFIED = 0 + CONFIRMED_INCIDENT = 1 + EMERGING_INCIDENT = 2 + + class State(proto.Enum): + r"""The state of the organization event. This enum lists all + possible states of event. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + ACTIVE (1): + Event is actively affecting a Google Cloud + product and will continue to receive updates. + CLOSED (2): + Event is no longer affecting the Google Cloud + product or has been merged with another event. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLOSED = 2 + + class DetailedState(proto.Enum): + r"""The detailed state of the incident. This enum lists all + possible detailed states of an incident. + + Values: + DETAILED_STATE_UNSPECIFIED (0): + Unspecified detail state. + EMERGING (1): + Google engineers are actively investigating + the incident to determine the impact. + CONFIRMED (2): + The incident is confirmed and impacting at + least one Google Cloud product. 
Ongoing status + updates will be provided until it is resolved. + RESOLVED (3): + The incident is no longer affecting any + Google Cloud product, and there will be no + further updates. + MERGED (4): + The incident was merged into a parent event. All further + updates will be published to the parent only. The + ``parent_event`` contains the name of the parent. + AUTO_CLOSED (9): + The incident was automatically closed because + the issues couldn’t be confirmed or is no longer + impacting Google Cloud Products and/or + Locations. + FALSE_POSITIVE (10): + The incident was verified as non-impactful. + No further action required. + """ + DETAILED_STATE_UNSPECIFIED = 0 + EMERGING = 1 + CONFIRMED = 2 + RESOLVED = 3 + MERGED = 4 + AUTO_CLOSED = 9 + FALSE_POSITIVE = 10 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + title: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + category: EventCategory = proto.Field( + proto.ENUM, + number=4, + enum=EventCategory, + ) + detailed_category: DetailedCategory = proto.Field( + proto.ENUM, + number=17, + enum=DetailedCategory, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + detailed_state: DetailedState = proto.Field( + proto.ENUM, + number=16, + enum=DetailedState, + ) + event_impacts: MutableSequence["EventImpact"] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="EventImpact", + ) + updates: MutableSequence["EventUpdate"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="EventUpdate", + ) + parent_event: str = proto.Field( + proto.STRING, + number=9, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + 
message=timestamp_pb2.Timestamp, + ) + next_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + + +class EventUpdate(proto.Message): + r"""Records an update made to the event. + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the update was posted. + title (str): + Output only. Brief title for the event. + description (str): + Output only. Free-form, human-readable + description. + symptom (str): + Output only. Symptoms of the event, if + available. + workaround (str): + Output only. Workaround steps to remediate + the event impact, if available. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + title: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + symptom: str = proto.Field( + proto.STRING, + number=4, + ) + workaround: str = proto.Field( + proto.STRING, + number=5, + ) + + +class Location(proto.Message): + r"""Represents the locations impacted by the event. + + Attributes: + location_name (str): + Location impacted by the event. Example: ``"us-central1"`` + """ + + location_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Product(proto.Message): + r"""Represents the Google Cloud product impacted by the event. + + Attributes: + product_name (str): + Google Cloud product impacted by the event. Example: + ``"Google Cloud SQL"`` + """ + + product_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EventImpact(proto.Message): + r"""Represents the Google Cloud products and locations impacted + by the event. + + Attributes: + product (google.cloud.servicehealth_v1.types.Product): + Google Cloud product impacted by the event. + location (google.cloud.servicehealth_v1.types.Location): + Location impacted by the event. 
+ """ + + product: "Product" = proto.Field( + proto.MESSAGE, + number=1, + message="Product", + ) + location: "Location" = proto.Field( + proto.MESSAGE, + number=2, + message="Location", + ) + + +class OrganizationImpact(proto.Message): + r"""Represents impact to assets at organizational level. It is a + read-only view and does not allow any modifications. + + Attributes: + name (str): + Output only. Identifier. Unique name of the organization + impact in this scope including organization and location + using the form + ``organizations/{organization_id}/locations/{location}/organizationImpacts/{organization_impact_id}``. + + ``organization_id`` - ID (number) of the organization that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``organization_impact_id`` - ID of the `OrganizationImpact + resource `__. + events (MutableSequence[str]): + Output only. A list of event names impacting + the asset. + asset (google.cloud.servicehealth_v1.types.Asset): + Output only. Google Cloud asset possibly + impacted by the specified events. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the affected + project was last modified. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + events: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + asset: "Asset" = proto.Field( + proto.MESSAGE, + number=3, + message="Asset", + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class Asset(proto.Message): + r"""Represents the asset impacted by the events. + + Attributes: + asset_name (str): + Output only. Full name of the resource as defined in + `Resource + Names `__. + asset_type (str): + Output only. Type of the asset. 
Example: + ``"cloudresourcemanager.googleapis.com/Project"`` + """ + + asset_name: str = proto.Field( + proto.STRING, + number=1, + ) + asset_type: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEventsRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. Parent value using the form + ``projects/{project_id}/locations/{location}/events``. + + ``project_id`` - ID of the project for which to list service + health events. ``location`` - The location to get the + service health events from. To retrieve service health + events of category = INCIDENT, use ``location`` = + ``global``. + page_size (int): + Optional. The maximum number of events that should be + returned. Acceptable values are 1 to 100, inclusive. (The + default value is 10.) If more results are available, the + service returns a next_page_token that you can use to get + the next page of results in subsequent list requests. The + service may return fewer events than the requested + page_size. + page_token (str): + Optional. A token identifying a page of results the server + should return. Provide Page token returned by a previous + ``ListEvents`` call to retrieve the next page of results. + When paginating, all other parameters provided to + ``ListEvents`` must match the call that provided the page + token. + filter (str): + Optional. A filter expression that filters resources listed + in the response. The expression takes the following forms: + + - field=value for ``category`` and ``state``\ + - field <, >, <=, or >= value for ``update_time`` Examples: + ``category=INCIDENT``, + ``update_time>=2000-01-01T11:30:00-04:00`` + + .. raw:: html + +
+ + Multiple filter queries are separated by spaces. Example: + ``category=INCIDENT state=ACTIVE``. + + By default, each expression is an AND expression. However, + you can include AND and OR expressions explicitly. + + Filter is supported for the following fields: ``category``, + ``state``, ``update_time`` + view (google.cloud.servicehealth_v1.types.EventView): + Optional. Event fields to include in + response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + view: "EventView" = proto.Field( + proto.ENUM, + number=6, + enum="EventView", + ) + + +class ListEventsResponse(proto.Message): + r""" + + Attributes: + events (MutableSequence[google.cloud.servicehealth_v1.types.Event]): + Output only. List of events. + next_page_token (str): + Output only. The continuation token, used to page through + large result sets. Provide this value in a subsequent + request as page_token to retrieve the next page. + + If this field is not present, there are no subsequent + results. + unreachable (MutableSequence[str]): + Output only. Locations that could not be + reached. + """ + + @property + def raw_page(self): + return self + + events: MutableSequence["Event"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Event", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetEventRequest(proto.Message): + r"""Message for getting an event + + Attributes: + name (str): + Required. Unique name of the event in this scope including + project and location using the form + ``projects/{project_id}/locations/{location}/events/{event_id}``. + + ``project_id`` - Project ID of the project that contains the + event. 
``location`` - The location to get the service health + events from. ``event_id`` - Event ID to retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListOrganizationEventsRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. Parent value using the form + ``organizations/{organization_id}/locations/{location}/organizationEvents``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``location`` - The location to get the service health events + from. To retrieve service health events of category = + INCIDENT, use ``location`` = ``global``. + page_size (int): + Optional. The maximum number of events that should be + returned. Acceptable values are ``1`` to ``100``, inclusive. + (The default value is ``10``.) If more results are + available, the service returns a ``next_page_token`` that + you can use to get the next page of results in subsequent + list requests. The service may return fewer events than the + requested ``page_size``. + page_token (str): + Optional. A token identifying a page of results the server + should return. + + Provide Page token returned by a previous + ``ListOrganizationEvents`` call to retrieve the next page of + results. + + When paginating, all other parameters provided to + ``ListOrganizationEvents`` must match the call that provided + the page token. + filter (str): + Optional. A filter expression that filters resources listed + in the response. The expression takes the following forms: + + - field=value for ``category`` and ``state`` + - field <, >, <=, or >= value for ``update_time`` + + Examples: ``category=INCIDENT``, + ``update_time>=2000-01-01T11:30:00-04:00`` + + Multiple filter queries are space-separated. Example: + ``category=INCIDENT state=ACTIVE``. + + By default, each expression is an AND expression. 
However, + you can include AND and OR expressions explicitly. + + Filter is supported for the following fields: ``category``, + ``state``, ``update_time`` + view (google.cloud.servicehealth_v1.types.OrganizationEventView): + Optional. OrganizationEvent fields to include + in response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + view: "OrganizationEventView" = proto.Field( + proto.ENUM, + number=6, + enum="OrganizationEventView", + ) + + +class ListOrganizationEventsResponse(proto.Message): + r""" + + Attributes: + organization_events (MutableSequence[google.cloud.servicehealth_v1.types.OrganizationEvent]): + Output only. List of organization events + affecting an organization. + next_page_token (str): + Output only. The continuation token, used to page through + large result sets. Provide this value in a subsequent + request as ``page_token`` to retrieve the next page. + + If this field is not present, there are no subsequent + results. + unreachable (MutableSequence[str]): + Output only. Locations that could not be + reached. + """ + + @property + def raw_page(self): + return self + + organization_events: MutableSequence["OrganizationEvent"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OrganizationEvent", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetOrganizationEventRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. Unique name of the event in this scope including + organization and event ID using the form + ``organizations/{organization_id}/locations/locations/global/organizationEvents/{event_id}``. + + ``organization_id`` - ID (number) of the project that + contains the event. 
To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``event_id`` - Organization event ID to retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListOrganizationImpactsRequest(proto.Message): + r"""Message for requesting list of OrganizationImpacts + + Attributes: + parent (str): + Required. Parent value using the form + ``organizations/{organization_id}/locations/{location}/organizationImpacts``. + + ``organization_id`` - ID (number) of the project that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + page_size (int): + Optional. The maximum number of events that should be + returned. Acceptable values are ``1`` to ``100``, inclusive. + The default value is ``10``. + + If more results are available, the service returns a + ``next_page_token`` that can be used to get the next page of + results in subsequent list requests. The service may return + fewer + `impacts `__ + than the requested ``page_size``. + page_token (str): + Optional. A token identifying a page of results the server + should return. + + Provide ``page_token`` returned by a previous + ``ListOrganizationImpacts`` call to retrieve the next page + of results. + + When paginating, all other parameters provided to + ``ListOrganizationImpacts`` must match the call that + provided the page token. + filter (str): + Optional. A filter expression that filters resources listed + in the response. The expression is in the form of + ``field:value`` for checking if a repeated field contains a + value. + + Example: + ``events:organizations%2F{organization_id}%2Flocations%2Fglobal%2ForganizationEvents%2Fevent-id`` + + To get your ``{organization_id}``, see `Getting your + organization resource + ID `__. + + Multiple filter queries are separated by spaces. + + By default, each expression is an AND expression. However, + you can include AND and OR expressions explicitly. 
Filter is + supported for the following fields: ``events``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListOrganizationImpactsResponse(proto.Message): + r""" + + Attributes: + organization_impacts (MutableSequence[google.cloud.servicehealth_v1.types.OrganizationImpact]): + Output only. List of + `impacts `__ + for an organization affected by service health events. + next_page_token (str): + Output only. The continuation token, used to page through + large result sets. Provide this value in a subsequent + request as ``page_token`` to retrieve the next page. + + If this field is not present, there are no subsequent + results. + unreachable (MutableSequence[str]): + Output only. Locations that could not be + reached. + """ + + @property + def raw_page(self): + return self + + organization_impacts: MutableSequence["OrganizationImpact"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OrganizationImpact", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetOrganizationImpactRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. Name of the resource using the form + ``organizations/{organization_id}/locations/global/organizationImpacts/{organization_impact_id}``. + + ``organization_id`` - ID (number) of the organization that + contains the event. To get your ``organization_id``, see + `Getting your organization resource + ID `__. + ``organization_impact_id`` - ID of the `OrganizationImpact + resource `__. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/event_service.py b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/event_service.py new file mode 100644 index 000000000000..11800d8c71ad --- /dev/null +++ b/packages/google-cloud-servicehealth/google/cloud/servicehealth_v1/types/event_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.servicehealth.v1", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-servicehealth/mypy.ini b/packages/google-cloud-servicehealth/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-servicehealth/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-servicehealth/noxfile.py b/packages/google-cloud-servicehealth/noxfile.py new file mode 100644 index 000000000000..7d3551347c78 --- /dev/null +++ b/packages/google-cloud-servicehealth/noxfile.py @@ -0,0 +1,410 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. 
+ + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + 
session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_event_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_event_async.py new file mode 100644 index 000000000000..6a1aa71a09b6 --- /dev/null +++ 
b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_event_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_GetEvent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +async def sample_get_event(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetEventRequest( + name="name_value", + ) + + # Make the request + response = await client.get_event(request=request) + + # Handle the response + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_GetEvent_async] diff --git a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_event_sync.py similarity index 75% rename from packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_async.py rename to packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_event_sync.py index 2b57ea3be08f..fc240b2da2d3 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_async.py +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_event_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for BufferTask +# Snippet for GetEvent # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-tasks +# python3 -m pip install google-cloud-servicehealth -# [START cloudtasks_v2beta3_generated_CloudTasks_BufferTask_async] +# [START servicehealth_v1_generated_ServiceHealth_GetEvent_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import tasks_v2beta3 +from google.cloud import servicehealth_v1 -async def sample_buffer_task(): +def sample_get_event(): # Create a client - client = tasks_v2beta3.CloudTasksAsyncClient() + client = servicehealth_v1.ServiceHealthClient() # Initialize request argument(s) - request = tasks_v2beta3.BufferTaskRequest( - queue="queue_value", + request = servicehealth_v1.GetEventRequest( + name="name_value", ) # Make the request - response = await client.buffer_task(request=request) + response = client.get_event(request=request) # Handle the response print(response) -# [END cloudtasks_v2beta3_generated_CloudTasks_BufferTask_async] +# [END servicehealth_v1_generated_ServiceHealth_GetEvent_sync] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_event_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_event_async.py new file mode 100644 index 000000000000..c2cead3be763 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_event_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganizationEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_GetOrganizationEvent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +async def sample_get_organization_event(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationEventRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization_event(request=request) + + # Handle the response + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_GetOrganizationEvent_async] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_event_sync.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_event_sync.py new file mode 100644 index 000000000000..37cf306b4cd3 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_event_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganizationEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_GetOrganizationEvent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +def sample_get_organization_event(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationEventRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization_event(request=request) + + # Handle the response + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_GetOrganizationEvent_sync] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_impact_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_impact_async.py new file mode 100644 index 000000000000..dc8886dee919 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_impact_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganizationImpact +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_GetOrganizationImpact_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +async def sample_get_organization_impact(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationImpactRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization_impact(request=request) + + # Handle the response + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_GetOrganizationImpact_async] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_impact_sync.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_impact_sync.py new file mode 100644 index 000000000000..86b188d4bd00 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_get_organization_impact_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganizationImpact +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_GetOrganizationImpact_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +def sample_get_organization_impact(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.GetOrganizationImpactRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization_impact(request=request) + + # Handle the response + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_GetOrganizationImpact_sync] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_events_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_events_async.py new file mode 100644 index 000000000000..0e3fd8f6ccc2 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_events_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_ListEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +async def sample_list_events(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_events(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_ListEvents_async] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_events_sync.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_events_sync.py new file mode 100644 index 000000000000..f3c499f467d9 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_events_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_ListEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +def sample_list_events(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_events(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_ListEvents_sync] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_events_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_events_async.py new file mode 100644 index 000000000000..94dfd5d8f1f0 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_events_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOrganizationEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_ListOrganizationEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +async def sample_list_organization_events(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_events(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_ListOrganizationEvents_async] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_events_sync.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_events_sync.py new file mode 100644 index 000000000000..436a7e1d2e45 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_events_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListOrganizationEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_ListOrganizationEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +def sample_list_organization_events(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationEventsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_events(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_ListOrganizationEvents_sync] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_impacts_async.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_impacts_async.py new file mode 100644 index 000000000000..210b6acd17b8 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_impacts_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOrganizationImpacts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_ListOrganizationImpacts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +async def sample_list_organization_impacts(): + # Create a client + client = servicehealth_v1.ServiceHealthAsyncClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationImpactsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_impacts(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_ListOrganizationImpacts_async] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_impacts_sync.py b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_impacts_sync.py new file mode 100644 index 000000000000..ccc577a93864 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/servicehealth_v1_generated_service_health_list_organization_impacts_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListOrganizationImpacts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-servicehealth + + +# [START servicehealth_v1_generated_ServiceHealth_ListOrganizationImpacts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import servicehealth_v1 + + +def sample_list_organization_impacts(): + # Create a client + client = servicehealth_v1.ServiceHealthClient() + + # Initialize request argument(s) + request = servicehealth_v1.ListOrganizationImpactsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_organization_impacts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END servicehealth_v1_generated_ServiceHealth_ListOrganizationImpacts_sync] diff --git a/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json b/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json new file mode 100644 index 000000000000..22600c1e9d56 --- /dev/null +++ b/packages/google-cloud-servicehealth/samples/generated_samples/snippet_metadata_google.cloud.servicehealth.v1.json @@ -0,0 +1,981 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.servicehealth.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-servicehealth", + "version": "0.1.0" + }, + "snippets": [ + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient", + "shortName": "ServiceHealthAsyncClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient.get_event", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.GetEvent", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "GetEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.GetEventRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.types.Event", + "shortName": "get_event" + }, + "description": "Sample for GetEvent", + "file": "servicehealth_v1_generated_service_health_get_event_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_GetEvent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_get_event_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient", + "shortName": "ServiceHealthClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient.get_event", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.GetEvent", + 
"service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "GetEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.GetEventRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.types.Event", + "shortName": "get_event" + }, + "description": "Sample for GetEvent", + "file": "servicehealth_v1_generated_service_health_get_event_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_GetEvent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_get_event_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient", + "shortName": "ServiceHealthAsyncClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient.get_organization_event", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.GetOrganizationEvent", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "GetOrganizationEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.GetOrganizationEventRequest" + }, + { + "name": "name", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.types.OrganizationEvent", + "shortName": "get_organization_event" + }, + "description": "Sample for GetOrganizationEvent", + "file": "servicehealth_v1_generated_service_health_get_organization_event_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_GetOrganizationEvent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_get_organization_event_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient", + "shortName": "ServiceHealthClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient.get_organization_event", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.GetOrganizationEvent", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "GetOrganizationEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.GetOrganizationEventRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.servicehealth_v1.types.OrganizationEvent", + "shortName": "get_organization_event" + }, + "description": "Sample for GetOrganizationEvent", + "file": "servicehealth_v1_generated_service_health_get_organization_event_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_GetOrganizationEvent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_get_organization_event_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient", + "shortName": "ServiceHealthAsyncClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient.get_organization_impact", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.GetOrganizationImpact", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "GetOrganizationImpact" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.GetOrganizationImpactRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.types.OrganizationImpact", + "shortName": "get_organization_impact" + }, + "description": "Sample for GetOrganizationImpact", + "file": 
"servicehealth_v1_generated_service_health_get_organization_impact_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_GetOrganizationImpact_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_get_organization_impact_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient", + "shortName": "ServiceHealthClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient.get_organization_impact", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.GetOrganizationImpact", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "GetOrganizationImpact" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.GetOrganizationImpactRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.types.OrganizationImpact", + "shortName": "get_organization_impact" + }, + "description": "Sample for GetOrganizationImpact", + "file": "servicehealth_v1_generated_service_health_get_organization_impact_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_GetOrganizationImpact_sync", + "segments": 
[ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_get_organization_impact_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient", + "shortName": "ServiceHealthAsyncClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient.list_events", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.ListEvents", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "ListEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.ListEventsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.services.service_health.pagers.ListEventsAsyncPager", + "shortName": "list_events" + }, + "description": "Sample for ListEvents", + "file": "servicehealth_v1_generated_service_health_list_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_ListEvents_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_list_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient", + "shortName": "ServiceHealthClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient.list_events", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.ListEvents", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "ListEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.ListEventsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.services.service_health.pagers.ListEventsPager", + "shortName": "list_events" + }, + "description": "Sample for ListEvents", + "file": "servicehealth_v1_generated_service_health_list_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_ListEvents_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_list_events_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.servicehealth_v1.ServiceHealthAsyncClient", + "shortName": "ServiceHealthAsyncClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient.list_organization_events", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.ListOrganizationEvents", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "ListOrganizationEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.ListOrganizationEventsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationEventsAsyncPager", + "shortName": "list_organization_events" + }, + "description": "Sample for ListOrganizationEvents", + "file": "servicehealth_v1_generated_service_health_list_organization_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_ListOrganizationEvents_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_list_organization_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient", + "shortName": "ServiceHealthClient" + }, + "fullName": 
"google.cloud.servicehealth_v1.ServiceHealthClient.list_organization_events", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.ListOrganizationEvents", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "ListOrganizationEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.ListOrganizationEventsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationEventsPager", + "shortName": "list_organization_events" + }, + "description": "Sample for ListOrganizationEvents", + "file": "servicehealth_v1_generated_service_health_list_organization_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_ListOrganizationEvents_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_list_organization_events_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient", + "shortName": "ServiceHealthAsyncClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthAsyncClient.list_organization_impacts", + "method": { + "fullName": 
"google.cloud.servicehealth.v1.ServiceHealth.ListOrganizationImpacts", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": "ServiceHealth" + }, + "shortName": "ListOrganizationImpacts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.ListOrganizationImpactsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationImpactsAsyncPager", + "shortName": "list_organization_impacts" + }, + "description": "Sample for ListOrganizationImpacts", + "file": "servicehealth_v1_generated_service_health_list_organization_impacts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_ListOrganizationImpacts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_list_organization_impacts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient", + "shortName": "ServiceHealthClient" + }, + "fullName": "google.cloud.servicehealth_v1.ServiceHealthClient.list_organization_impacts", + "method": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth.ListOrganizationImpacts", + "service": { + "fullName": "google.cloud.servicehealth.v1.ServiceHealth", + "shortName": 
"ServiceHealth" + }, + "shortName": "ListOrganizationImpacts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.servicehealth_v1.types.ListOrganizationImpactsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.servicehealth_v1.services.service_health.pagers.ListOrganizationImpactsPager", + "shortName": "list_organization_impacts" + }, + "description": "Sample for ListOrganizationImpacts", + "file": "servicehealth_v1_generated_service_health_list_organization_impacts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "servicehealth_v1_generated_ServiceHealth_ListOrganizationImpacts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "servicehealth_v1_generated_service_health_list_organization_impacts_sync.py" + } + ] +} diff --git a/packages/google-cloud-servicehealth/scripts/decrypt-secrets.sh b/packages/google-cloud-servicehealth/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-servicehealth/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-servicehealth/scripts/fixup_servicehealth_v1_keywords.py b/packages/google-cloud-servicehealth/scripts/fixup_servicehealth_v1_keywords.py new file mode 100644 index 000000000000..32b095e7591e --- /dev/null +++ b/packages/google-cloud-servicehealth/scripts/fixup_servicehealth_v1_keywords.py @@ -0,0 +1,181 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class servicehealthCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'get_event': ('name', ), + 'get_organization_event': ('name', ), + 'get_organization_impact': ('name', ), + 'list_events': ('parent', 'page_size', 'page_token', 'filter', 'view', ), + 'list_organization_events': ('parent', 'page_size', 'page_token', 'filter', 'view', ), + 'list_organization_impacts': ('parent', 'page_size', 'page_token', 'filter', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. 
+ # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=servicehealthCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the servicehealth client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-servicehealth/setup.py b/packages/google-cloud-servicehealth/setup.py new file mode 100644 index 000000000000..e094adf6c4ed --- /dev/null +++ b/packages/google-cloud-servicehealth/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-servicehealth" + + +description = "Google Cloud Servicehealth API client library" + +version = None + +with open( + os.path.join(package_root, "google/cloud/servicehealth/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + 
version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-servicehealth/testing/.gitignore b/packages/google-cloud-servicehealth/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-servicehealth/testing/constraints-3.10.txt b/packages/google-cloud-servicehealth/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-servicehealth/testing/constraints-3.11.txt b/packages/google-cloud-servicehealth/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-servicehealth/testing/constraints-3.12.txt b/packages/google-cloud-servicehealth/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-servicehealth/testing/constraints-3.7.txt b/packages/google-cloud-servicehealth/testing/constraints-3.7.txt new file mode 100644 index 000000000000..185f7d366c2f --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.3 +protobuf==3.19.5 diff --git a/packages/google-cloud-servicehealth/testing/constraints-3.8.txt b/packages/google-cloud-servicehealth/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-servicehealth/testing/constraints-3.9.txt b/packages/google-cloud-servicehealth/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-servicehealth/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-servicehealth/tests/__init__.py b/packages/google-cloud-servicehealth/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-servicehealth/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-servicehealth/tests/unit/__init__.py b/packages/google-cloud-servicehealth/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-servicehealth/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-servicehealth/tests/unit/gapic/__init__.py b/packages/google-cloud-servicehealth/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-servicehealth/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/__init__.py b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py new file mode 100644 index 000000000000..fe663dbdb440 --- /dev/null +++ b/packages/google-cloud-servicehealth/tests/unit/gapic/servicehealth_v1/test_service_health.py @@ -0,0 +1,5986 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.servicehealth_v1.services.service_health import ( + ServiceHealthAsyncClient, + ServiceHealthClient, + pagers, + transports, +) +from google.cloud.servicehealth_v1.types import event_resources + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServiceHealthClient._get_default_mtls_endpoint(None) is None + assert ( + ServiceHealthClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ServiceHealthClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ServiceHealthClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ServiceHealthClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ServiceHealthClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ServiceHealthClient, "grpc"), + (ServiceHealthAsyncClient, "grpc_asyncio"), + (ServiceHealthClient, "rest"), + ], +) +def test_service_health_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "servicehealth.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://servicehealth.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ServiceHealthGrpcTransport, "grpc"), + 
(transports.ServiceHealthGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ServiceHealthRestTransport, "rest"), + ], +) +def test_service_health_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ServiceHealthClient, "grpc"), + (ServiceHealthAsyncClient, "grpc_asyncio"), + (ServiceHealthClient, "rest"), + ], +) +def test_service_health_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "servicehealth.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://servicehealth.googleapis.com" + ) + + +def test_service_health_client_get_transport_class(): + transport = ServiceHealthClient.get_transport_class() + available_transports = [ + 
transports.ServiceHealthGrpcTransport, + transports.ServiceHealthRestTransport, + ] + assert transport in available_transports + + transport = ServiceHealthClient.get_transport_class("grpc") + assert transport == transports.ServiceHealthGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ServiceHealthClient, transports.ServiceHealthGrpcTransport, "grpc"), + ( + ServiceHealthAsyncClient, + transports.ServiceHealthGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ServiceHealthClient, transports.ServiceHealthRestTransport, "rest"), + ], +) +@mock.patch.object( + ServiceHealthClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServiceHealthClient), +) +@mock.patch.object( + ServiceHealthAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServiceHealthAsyncClient), +) +def test_service_health_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServiceHealthClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceHealthClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ServiceHealthClient, transports.ServiceHealthGrpcTransport, "grpc", "true"), + ( + ServiceHealthAsyncClient, + transports.ServiceHealthGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ServiceHealthClient, 
transports.ServiceHealthGrpcTransport, "grpc", "false"), + ( + ServiceHealthAsyncClient, + transports.ServiceHealthGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ServiceHealthClient, transports.ServiceHealthRestTransport, "rest", "true"), + (ServiceHealthClient, transports.ServiceHealthRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ServiceHealthClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServiceHealthClient), +) +@mock.patch.object( + ServiceHealthAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServiceHealthAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_health_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ServiceHealthClient, ServiceHealthAsyncClient] +) +@mock.patch.object( + ServiceHealthClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServiceHealthClient), +) +@mock.patch.object( + ServiceHealthAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServiceHealthAsyncClient), +) +def test_service_health_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ServiceHealthClient, transports.ServiceHealthGrpcTransport, "grpc"), + ( + ServiceHealthAsyncClient, + transports.ServiceHealthGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ServiceHealthClient, transports.ServiceHealthRestTransport, "rest"), + ], +) +def test_service_health_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ServiceHealthClient, + transports.ServiceHealthGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ServiceHealthAsyncClient, + transports.ServiceHealthGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ServiceHealthClient, transports.ServiceHealthRestTransport, "rest", None), + ], +) +def 
test_service_health_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_service_health_client_client_options_from_dict(): + with mock.patch( + "google.cloud.servicehealth_v1.services.service_health.transports.ServiceHealthGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ServiceHealthClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ServiceHealthClient, + transports.ServiceHealthGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ServiceHealthAsyncClient, + transports.ServiceHealthGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_service_health_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "servicehealth.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="servicehealth.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.ListEventsRequest, + dict, + ], +) +def test_list_events(request_type, transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListEventsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListEventsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEventsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_events_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + client.list_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListEventsRequest() + + +@pytest.mark.asyncio +async def test_list_events_async( + transport: str = "grpc_asyncio", request_type=event_resources.ListEventsRequest +): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListEventsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListEventsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEventsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_events_async_from_dict(): + await test_list_events_async(request_type=dict) + + +def test_list_events_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.ListEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + call.return_value = event_resources.ListEventsResponse() + client.list_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_events_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.ListEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListEventsResponse() + ) + await client.list_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_events_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListEventsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_events( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_events_flattened_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_events( + event_resources.ListEventsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_events_flattened_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListEventsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListEventsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_events( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_events_flattened_error_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_events( + event_resources.ListEventsRequest(), + parent="parent_value", + ) + + +def test_list_events_pager(transport_name: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + event_resources.Event(), + ], + next_page_token="abc", + ), + event_resources.ListEventsResponse( + events=[], + next_page_token="def", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + ], + next_page_token="ghi", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_events(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_resources.Event) for i in results) + + +def test_list_events_pages(transport_name: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + event_resources.Event(), + ], + next_page_token="abc", + ), + event_resources.ListEventsResponse( + events=[], + next_page_token="def", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + ], + next_page_token="ghi", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + ], + ), + RuntimeError, + ) + pages = list(client.list_events(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_events_async_pager(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_events), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + event_resources.Event(), + ], + next_page_token="abc", + ), + event_resources.ListEventsResponse( + events=[], + next_page_token="def", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + ], + next_page_token="ghi", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_events( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, event_resources.Event) for i in responses) + + +@pytest.mark.asyncio +async def test_list_events_async_pages(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_events), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + event_resources.Event(), + ], + next_page_token="abc", + ), + event_resources.ListEventsResponse( + events=[], + next_page_token="def", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + ], + next_page_token="ghi", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_events(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.GetEventRequest, + dict, + ], +) +def test_get_event(request_type, transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_event), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = event_resources.Event( + name="name_value", + title="title_value", + description="description_value", + category=event_resources.Event.EventCategory.INCIDENT, + detailed_category=event_resources.Event.DetailedCategory.CONFIRMED_INCIDENT, + state=event_resources.Event.State.ACTIVE, + detailed_state=event_resources.Event.DetailedState.EMERGING, + relevance=event_resources.Event.Relevance.UNKNOWN, + parent_event="parent_event_value", + ) + response = client.get_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, event_resources.Event) + assert response.name == "name_value" + assert response.title == "title_value" + assert response.description == "description_value" + assert response.category == event_resources.Event.EventCategory.INCIDENT + assert ( + response.detailed_category + == event_resources.Event.DetailedCategory.CONFIRMED_INCIDENT + ) + assert response.state == event_resources.Event.State.ACTIVE + assert response.detailed_state == event_resources.Event.DetailedState.EMERGING + assert response.relevance == event_resources.Event.Relevance.UNKNOWN + assert response.parent_event == "parent_event_value" + + +def test_get_event_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_event), "__call__") as call: + client.get_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetEventRequest() + + +@pytest.mark.asyncio +async def test_get_event_async( + transport: str = "grpc_asyncio", request_type=event_resources.GetEventRequest +): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.Event( + name="name_value", + title="title_value", + description="description_value", + category=event_resources.Event.EventCategory.INCIDENT, + detailed_category=event_resources.Event.DetailedCategory.CONFIRMED_INCIDENT, + state=event_resources.Event.State.ACTIVE, + detailed_state=event_resources.Event.DetailedState.EMERGING, + relevance=event_resources.Event.Relevance.UNKNOWN, + parent_event="parent_event_value", + ) + ) + response = await client.get_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetEventRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, event_resources.Event) + assert response.name == "name_value" + assert response.title == "title_value" + assert response.description == "description_value" + assert response.category == event_resources.Event.EventCategory.INCIDENT + assert ( + response.detailed_category + == event_resources.Event.DetailedCategory.CONFIRMED_INCIDENT + ) + assert response.state == event_resources.Event.State.ACTIVE + assert response.detailed_state == event_resources.Event.DetailedState.EMERGING + assert response.relevance == event_resources.Event.Relevance.UNKNOWN + assert response.parent_event == "parent_event_value" + + +@pytest.mark.asyncio +async def test_get_event_async_from_dict(): + await test_get_event_async(request_type=dict) + + +def test_get_event_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.GetEventRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_event), "__call__") as call: + call.return_value = event_resources.Event() + client.get_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_event_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = event_resources.GetEventRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_event), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.Event() + ) + await client.get_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_event_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.Event() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_event( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_event_flattened_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_event( + event_resources.GetEventRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_event_flattened_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.Event() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.Event() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_event( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_event_flattened_error_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_event( + event_resources.GetEventRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.ListOrganizationEventsRequest, + dict, + ], +) +def test_list_organization_events(request_type, transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListOrganizationEventsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_organization_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListOrganizationEventsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrganizationEventsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_organization_events_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + client.list_organization_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListOrganizationEventsRequest() + + +@pytest.mark.asyncio +async def test_list_organization_events_async( + transport: str = "grpc_asyncio", + request_type=event_resources.ListOrganizationEventsRequest, +): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListOrganizationEventsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_organization_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListOrganizationEventsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrganizationEventsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_organization_events_async_from_dict(): + await test_list_organization_events_async(request_type=dict) + + +def test_list_organization_events_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.ListOrganizationEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + call.return_value = event_resources.ListOrganizationEventsResponse() + client.list_organization_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_organization_events_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.ListOrganizationEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListOrganizationEventsResponse() + ) + await client.list_organization_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_organization_events_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListOrganizationEventsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_organization_events( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_organization_events_flattened_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_organization_events( + event_resources.ListOrganizationEventsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_organization_events_flattened_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListOrganizationEventsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListOrganizationEventsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_organization_events( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_organization_events_flattened_error_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_organization_events( + event_resources.ListOrganizationEventsRequest(), + parent="parent_value", + ) + + +def test_list_organization_events_pager(transport_name: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[], + next_page_token="def", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_organization_events(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_resources.OrganizationEvent) for i in results) + + +def test_list_organization_events_pages(transport_name: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[], + next_page_token="def", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + ), + RuntimeError, + ) + pages = list(client.list_organization_events(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_organization_events_async_pager(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[], + next_page_token="def", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_organization_events( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, event_resources.OrganizationEvent) for i in responses) + + +@pytest.mark.asyncio +async def test_list_organization_events_async_pages(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_events), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[], + next_page_token="def", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_organization_events(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.GetOrganizationEventRequest, + dict, + ], +) +def test_get_organization_event(request_type, transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = event_resources.OrganizationEvent( + name="name_value", + title="title_value", + description="description_value", + category=event_resources.OrganizationEvent.EventCategory.INCIDENT, + detailed_category=event_resources.OrganizationEvent.DetailedCategory.CONFIRMED_INCIDENT, + state=event_resources.OrganizationEvent.State.ACTIVE, + detailed_state=event_resources.OrganizationEvent.DetailedState.EMERGING, + parent_event="parent_event_value", + ) + response = client.get_organization_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetOrganizationEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, event_resources.OrganizationEvent) + assert response.name == "name_value" + assert response.title == "title_value" + assert response.description == "description_value" + assert response.category == event_resources.OrganizationEvent.EventCategory.INCIDENT + assert ( + response.detailed_category + == event_resources.OrganizationEvent.DetailedCategory.CONFIRMED_INCIDENT + ) + assert response.state == event_resources.OrganizationEvent.State.ACTIVE + assert ( + response.detailed_state + == event_resources.OrganizationEvent.DetailedState.EMERGING + ) + assert response.parent_event == "parent_event_value" + + +def test_get_organization_event_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + client.get_organization_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetOrganizationEventRequest() + + +@pytest.mark.asyncio +async def test_get_organization_event_async( + transport: str = "grpc_asyncio", + request_type=event_resources.GetOrganizationEventRequest, +): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.OrganizationEvent( + name="name_value", + title="title_value", + description="description_value", + category=event_resources.OrganizationEvent.EventCategory.INCIDENT, + detailed_category=event_resources.OrganizationEvent.DetailedCategory.CONFIRMED_INCIDENT, + state=event_resources.OrganizationEvent.State.ACTIVE, + detailed_state=event_resources.OrganizationEvent.DetailedState.EMERGING, + parent_event="parent_event_value", + ) + ) + response = await client.get_organization_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetOrganizationEventRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, event_resources.OrganizationEvent) + assert response.name == "name_value" + assert response.title == "title_value" + assert response.description == "description_value" + assert response.category == event_resources.OrganizationEvent.EventCategory.INCIDENT + assert ( + response.detailed_category + == event_resources.OrganizationEvent.DetailedCategory.CONFIRMED_INCIDENT + ) + assert response.state == event_resources.OrganizationEvent.State.ACTIVE + assert ( + response.detailed_state + == event_resources.OrganizationEvent.DetailedState.EMERGING + ) + assert response.parent_event == "parent_event_value" + + +@pytest.mark.asyncio +async def test_get_organization_event_async_from_dict(): + await test_get_organization_event_async(request_type=dict) + + +def test_get_organization_event_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.GetOrganizationEventRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + call.return_value = event_resources.OrganizationEvent() + client.get_organization_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_organization_event_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = event_resources.GetOrganizationEventRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.OrganizationEvent() + ) + await client.get_organization_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_organization_event_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.OrganizationEvent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_organization_event( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_organization_event_flattened_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_organization_event( + event_resources.GetOrganizationEventRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_organization_event_flattened_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.OrganizationEvent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.OrganizationEvent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_organization_event( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_organization_event_flattened_error_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_organization_event( + event_resources.GetOrganizationEventRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.ListOrganizationImpactsRequest, + dict, + ], +) +def test_list_organization_impacts(request_type, transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListOrganizationImpactsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_organization_impacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListOrganizationImpactsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrganizationImpactsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_organization_impacts_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + client.list_organization_impacts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListOrganizationImpactsRequest() + + +@pytest.mark.asyncio +async def test_list_organization_impacts_async( + transport: str = "grpc_asyncio", + request_type=event_resources.ListOrganizationImpactsRequest, +): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListOrganizationImpactsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_organization_impacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.ListOrganizationImpactsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListOrganizationImpactsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_organization_impacts_async_from_dict(): + await test_list_organization_impacts_async(request_type=dict) + + +def test_list_organization_impacts_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.ListOrganizationImpactsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + call.return_value = event_resources.ListOrganizationImpactsResponse() + client.list_organization_impacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_organization_impacts_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.ListOrganizationImpactsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListOrganizationImpactsResponse() + ) + await client.list_organization_impacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_organization_impacts_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListOrganizationImpactsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_organization_impacts( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_organization_impacts_flattened_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_organization_impacts( + event_resources.ListOrganizationImpactsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_organization_impacts_flattened_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.ListOrganizationImpactsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.ListOrganizationImpactsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_organization_impacts( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_organization_impacts_flattened_error_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_organization_impacts( + event_resources.ListOrganizationImpactsRequest(), + parent="parent_value", + ) + + +def test_list_organization_impacts_pager(transport_name: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[], + next_page_token="def", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_organization_impacts(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_resources.OrganizationImpact) for i in results) + + +def test_list_organization_impacts_pages(transport_name: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[], + next_page_token="def", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + ), + RuntimeError, + ) + pages = list(client.list_organization_impacts(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_organization_impacts_async_pager(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[], + next_page_token="def", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_organization_impacts( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, event_resources.OrganizationImpact) for i in responses) + + +@pytest.mark.asyncio +async def test_list_organization_impacts_async_pages(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_organization_impacts), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[], + next_page_token="def", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_organization_impacts(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.GetOrganizationImpactRequest, + dict, + ], +) +def test_get_organization_impact(request_type, transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = event_resources.OrganizationImpact( + name="name_value", + events=["events_value"], + ) + response = client.get_organization_impact(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetOrganizationImpactRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, event_resources.OrganizationImpact) + assert response.name == "name_value" + assert response.events == ["events_value"] + + +def test_get_organization_impact_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + client.get_organization_impact() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetOrganizationImpactRequest() + + +@pytest.mark.asyncio +async def test_get_organization_impact_async( + transport: str = "grpc_asyncio", + request_type=event_resources.GetOrganizationImpactRequest, +): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.OrganizationImpact( + name="name_value", + events=["events_value"], + ) + ) + response = await client.get_organization_impact(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == event_resources.GetOrganizationImpactRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, event_resources.OrganizationImpact) + assert response.name == "name_value" + assert response.events == ["events_value"] + + +@pytest.mark.asyncio +async def test_get_organization_impact_async_from_dict(): + await test_get_organization_impact_async(request_type=dict) + + +def test_get_organization_impact_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = event_resources.GetOrganizationImpactRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + call.return_value = event_resources.OrganizationImpact() + client.get_organization_impact(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_organization_impact_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = event_resources.GetOrganizationImpactRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.OrganizationImpact() + ) + await client.get_organization_impact(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_organization_impact_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.OrganizationImpact() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_organization_impact( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_organization_impact_flattened_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_organization_impact( + event_resources.GetOrganizationImpactRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_organization_impact_flattened_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_organization_impact), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = event_resources.OrganizationImpact() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + event_resources.OrganizationImpact() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_organization_impact( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_organization_impact_flattened_error_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_organization_impact( + event_resources.GetOrganizationImpactRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.ListEventsRequest, + dict, + ], +) +def test_list_events_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.ListEventsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_events(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEventsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_events_rest_required_fields( + request_type=event_resources.ListEventsRequest, +): + transport_class = transports.ServiceHealthRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_events._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = event_resources.ListEventsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_resources.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_events(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_events_rest_unset_required_fields(): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_events_rest_interceptors(null_interceptor): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServiceHealthRestInterceptor(), + ) + client = ServiceHealthClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.ServiceHealthRestInterceptor, "post_list_events" + ) as post, mock.patch.object( + transports.ServiceHealthRestInterceptor, "pre_list_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = event_resources.ListEventsRequest.pb( + event_resources.ListEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = event_resources.ListEventsResponse.to_json( + event_resources.ListEventsResponse() + ) + + request = event_resources.ListEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_resources.ListEventsResponse() + + client.list_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_events_rest_bad_request( + transport: str = "rest", request_type=event_resources.ListEventsRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_events(request) + + +def test_list_events_rest_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.ListEventsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_events(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/events" % client.transport._host, + args[1], + ) + + +def test_list_events_rest_flattened_error(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_events( + event_resources.ListEventsRequest(), + parent="parent_value", + ) + + +def test_list_events_rest_pager(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + event_resources.Event(), + ], + next_page_token="abc", + ), + event_resources.ListEventsResponse( + events=[], + next_page_token="def", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + ], + next_page_token="ghi", + ), + event_resources.ListEventsResponse( + events=[ + event_resources.Event(), + event_resources.Event(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + event_resources.ListEventsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_events(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_resources.Event) for i in results) + + pages = list(client.list_events(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + 
"request_type", + [ + event_resources.GetEventRequest, + dict, + ], +) +def test_get_event_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/events/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.Event( + name="name_value", + title="title_value", + description="description_value", + category=event_resources.Event.EventCategory.INCIDENT, + detailed_category=event_resources.Event.DetailedCategory.CONFIRMED_INCIDENT, + state=event_resources.Event.State.ACTIVE, + detailed_state=event_resources.Event.DetailedState.EMERGING, + relevance=event_resources.Event.Relevance.UNKNOWN, + parent_event="parent_event_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.Event.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_event(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, event_resources.Event) + assert response.name == "name_value" + assert response.title == "title_value" + assert response.description == "description_value" + assert response.category == event_resources.Event.EventCategory.INCIDENT + assert ( + response.detailed_category + == event_resources.Event.DetailedCategory.CONFIRMED_INCIDENT + ) + assert response.state == event_resources.Event.State.ACTIVE + assert response.detailed_state == event_resources.Event.DetailedState.EMERGING + assert response.relevance == event_resources.Event.Relevance.UNKNOWN + assert response.parent_event == "parent_event_value" + + +def test_get_event_rest_required_fields(request_type=event_resources.GetEventRequest): + transport_class = transports.ServiceHealthRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = event_resources.Event() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_resources.Event.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_event(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_event_rest_unset_required_fields(): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_event_rest_interceptors(null_interceptor): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServiceHealthRestInterceptor(), + ) + client = ServiceHealthClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServiceHealthRestInterceptor, "post_get_event" + ) as post, mock.patch.object( + transports.ServiceHealthRestInterceptor, "pre_get_event" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = event_resources.GetEventRequest.pb( + event_resources.GetEventRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = event_resources.Event.to_json( + event_resources.Event() + ) + + request = event_resources.GetEventRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_resources.Event() + + client.get_event( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_event_rest_bad_request( + transport: str = "rest", request_type=event_resources.GetEventRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/events/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_event(request) + + +def test_get_event_rest_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.Event() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/events/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.Event.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_event(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/events/*}" % client.transport._host, + args[1], + ) + + +def test_get_event_rest_flattened_error(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_event( + event_resources.GetEventRequest(), + name="name_value", + ) + + +def test_get_event_rest_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.ListOrganizationEventsRequest, + dict, + ], +) +def test_list_organization_events_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.ListOrganizationEventsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.ListOrganizationEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_organization_events(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListOrganizationEventsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_organization_events_rest_required_fields( + request_type=event_resources.ListOrganizationEventsRequest, +): + transport_class = transports.ServiceHealthRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_organization_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_organization_events._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = event_resources.ListOrganizationEventsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_resources.ListOrganizationEventsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_organization_events(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_organization_events_rest_unset_required_fields(): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_organization_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_organization_events_rest_interceptors(null_interceptor): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServiceHealthRestInterceptor(), + ) + client = ServiceHealthClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServiceHealthRestInterceptor, "post_list_organization_events" + ) as post, mock.patch.object( + transports.ServiceHealthRestInterceptor, "pre_list_organization_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = event_resources.ListOrganizationEventsRequest.pb( + event_resources.ListOrganizationEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + event_resources.ListOrganizationEventsResponse.to_json( + event_resources.ListOrganizationEventsResponse() + ) + ) + + request = event_resources.ListOrganizationEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_resources.ListOrganizationEventsResponse() + + client.list_organization_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_organization_events_rest_bad_request( + transport: str = "rest", request_type=event_resources.ListOrganizationEventsRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_organization_events(request) + + +def test_list_organization_events_rest_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.ListOrganizationEventsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.ListOrganizationEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_organization_events(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/organizationEvents" + % client.transport._host, + args[1], + ) + + +def test_list_organization_events_rest_flattened_error(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_organization_events( + event_resources.ListOrganizationEventsRequest(), + parent="parent_value", + ) + + +def test_list_organization_events_rest_pager(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[], + next_page_token="def", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationEventsResponse( + organization_events=[ + event_resources.OrganizationEvent(), + event_resources.OrganizationEvent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + event_resources.ListOrganizationEventsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_organization_events(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_resources.OrganizationEvent) for i in results) + + pages = list(client.list_organization_events(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.GetOrganizationEventRequest, + dict, + ], +) +def test_get_organization_event_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will 
satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/organizationEvents/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.OrganizationEvent( + name="name_value", + title="title_value", + description="description_value", + category=event_resources.OrganizationEvent.EventCategory.INCIDENT, + detailed_category=event_resources.OrganizationEvent.DetailedCategory.CONFIRMED_INCIDENT, + state=event_resources.OrganizationEvent.State.ACTIVE, + detailed_state=event_resources.OrganizationEvent.DetailedState.EMERGING, + parent_event="parent_event_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.OrganizationEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_organization_event(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, event_resources.OrganizationEvent) + assert response.name == "name_value" + assert response.title == "title_value" + assert response.description == "description_value" + assert response.category == event_resources.OrganizationEvent.EventCategory.INCIDENT + assert ( + response.detailed_category + == event_resources.OrganizationEvent.DetailedCategory.CONFIRMED_INCIDENT + ) + assert response.state == event_resources.OrganizationEvent.State.ACTIVE + assert ( + response.detailed_state + == event_resources.OrganizationEvent.DetailedState.EMERGING + ) + assert response.parent_event == "parent_event_value" + + +def test_get_organization_event_rest_required_fields( + request_type=event_resources.GetOrganizationEventRequest, +): + transport_class = transports.ServiceHealthRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an 
appropriate value for the returned response. + return_value = event_resources.OrganizationEvent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_resources.OrganizationEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_organization_event(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_event_rest_unset_required_fields(): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_event_rest_interceptors(null_interceptor): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServiceHealthRestInterceptor(), + ) + client = 
ServiceHealthClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServiceHealthRestInterceptor, "post_get_organization_event" + ) as post, mock.patch.object( + transports.ServiceHealthRestInterceptor, "pre_get_organization_event" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = event_resources.GetOrganizationEventRequest.pb( + event_resources.GetOrganizationEventRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = event_resources.OrganizationEvent.to_json( + event_resources.OrganizationEvent() + ) + + request = event_resources.GetOrganizationEventRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_resources.OrganizationEvent() + + client.get_organization_event( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_organization_event_rest_bad_request( + transport: str = "rest", request_type=event_resources.GetOrganizationEventRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/organizationEvents/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_organization_event(request) + + +def test_get_organization_event_rest_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.OrganizationEvent() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/organizationEvents/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.OrganizationEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_organization_event(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/organizationEvents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_organization_event_rest_flattened_error(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization_event( + event_resources.GetOrganizationEventRequest(), + name="name_value", + ) + + +def test_get_organization_event_rest_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.ListOrganizationImpactsRequest, + dict, + ], +) +def test_list_organization_impacts_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = event_resources.ListOrganizationImpactsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.ListOrganizationImpactsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_organization_impacts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrganizationImpactsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_organization_impacts_rest_required_fields( + request_type=event_resources.ListOrganizationImpactsRequest, +): + transport_class = transports.ServiceHealthRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_organization_impacts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_organization_impacts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = event_resources.ListOrganizationImpactsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_resources.ListOrganizationImpactsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_organization_impacts(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_organization_impacts_rest_unset_required_fields(): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_organization_impacts._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_organization_impacts_rest_interceptors(null_interceptor): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServiceHealthRestInterceptor(), + ) + client = ServiceHealthClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServiceHealthRestInterceptor, "post_list_organization_impacts" + ) as post, mock.patch.object( + transports.ServiceHealthRestInterceptor, "pre_list_organization_impacts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
event_resources.ListOrganizationImpactsRequest.pb( + event_resources.ListOrganizationImpactsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + event_resources.ListOrganizationImpactsResponse.to_json( + event_resources.ListOrganizationImpactsResponse() + ) + ) + + request = event_resources.ListOrganizationImpactsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_resources.ListOrganizationImpactsResponse() + + client.list_organization_impacts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_organization_impacts_rest_bad_request( + transport: str = "rest", request_type=event_resources.ListOrganizationImpactsRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_organization_impacts(request) + + +def test_list_organization_impacts_rest_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.ListOrganizationImpactsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.ListOrganizationImpactsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_organization_impacts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/organizationImpacts" + % client.transport._host, + args[1], + ) + + +def test_list_organization_impacts_rest_flattened_error(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_organization_impacts( + event_resources.ListOrganizationImpactsRequest(), + parent="parent_value", + ) + + +def test_list_organization_impacts_rest_pager(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + next_page_token="abc", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[], + next_page_token="def", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + ], + next_page_token="ghi", + ), + event_resources.ListOrganizationImpactsResponse( + organization_impacts=[ + event_resources.OrganizationImpact(), + event_resources.OrganizationImpact(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + event_resources.ListOrganizationImpactsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_organization_impacts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, event_resources.OrganizationImpact) for i in results) + + pages = list(client.list_organization_impacts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + event_resources.GetOrganizationImpactRequest, + dict, + ], +) +def 
test_get_organization_impact_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/organizationImpacts/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.OrganizationImpact( + name="name_value", + events=["events_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.OrganizationImpact.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_organization_impact(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, event_resources.OrganizationImpact) + assert response.name == "name_value" + assert response.events == ["events_value"] + + +def test_get_organization_impact_rest_required_fields( + request_type=event_resources.GetOrganizationImpactRequest, +): + transport_class = transports.ServiceHealthRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization_impact._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization_impact._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = event_resources.OrganizationImpact() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = event_resources.OrganizationImpact.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_organization_impact(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_impact_rest_unset_required_fields(): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization_impact._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_impact_rest_interceptors(null_interceptor): + transport = transports.ServiceHealthRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServiceHealthRestInterceptor(), + ) + client = ServiceHealthClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServiceHealthRestInterceptor, "post_get_organization_impact" + ) as post, mock.patch.object( + transports.ServiceHealthRestInterceptor, "pre_get_organization_impact" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = event_resources.GetOrganizationImpactRequest.pb( + event_resources.GetOrganizationImpactRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = event_resources.OrganizationImpact.to_json( + event_resources.OrganizationImpact() + ) + + request = event_resources.GetOrganizationImpactRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = event_resources.OrganizationImpact() + + client.get_organization_impact( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_organization_impact_rest_bad_request( + transport: str = "rest", request_type=event_resources.GetOrganizationImpactRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/organizationImpacts/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_organization_impact(request) + + +def test_get_organization_impact_rest_flattened(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = event_resources.OrganizationImpact() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/organizationImpacts/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = event_resources.OrganizationImpact.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_organization_impact(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/organizationImpacts/*}" + % client.transport._host, + args[1], + ) + + +def test_get_organization_impact_rest_flattened_error(transport: str = "rest"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization_impact( + event_resources.GetOrganizationImpactRequest(), + name="name_value", + ) + + +def test_get_organization_impact_rest_error(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ServiceHealthGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ServiceHealthGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceHealthClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ServiceHealthGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceHealthClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceHealthClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceHealthGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceHealthClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceHealthGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceHealthClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceHealthGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServiceHealthGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceHealthGrpcTransport, + transports.ServiceHealthGrpcAsyncIOTransport, + transports.ServiceHealthRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ServiceHealthClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServiceHealthGrpcTransport, + ) + + +def test_service_health_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceHealthTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_service_health_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.servicehealth_v1.services.service_health.transports.ServiceHealthTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ServiceHealthTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_events", + "get_event", + "list_organization_events", + "get_organization_event", + "list_organization_impacts", + "get_organization_impact", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_service_health_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.servicehealth_v1.services.service_health.transports.ServiceHealthTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceHealthTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_service_health_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.servicehealth_v1.services.service_health.transports.ServiceHealthTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceHealthTransport() + adc.assert_called_once() + + +def test_service_health_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceHealthClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceHealthGrpcTransport, + transports.ServiceHealthGrpcAsyncIOTransport, + ], +) +def test_service_health_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceHealthGrpcTransport, + transports.ServiceHealthGrpcAsyncIOTransport, + transports.ServiceHealthRestTransport, + ], +) +def test_service_health_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServiceHealthGrpcTransport, grpc_helpers), + (transports.ServiceHealthGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def 
test_service_health_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "servicehealth.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="servicehealth.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceHealthGrpcTransport, + transports.ServiceHealthGrpcAsyncIOTransport, + ], +) +def test_service_health_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_service_health_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ServiceHealthRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_service_health_host_no_port(transport_name): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="servicehealth.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "servicehealth.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://servicehealth.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_service_health_host_with_port(transport_name): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="servicehealth.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "servicehealth.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://servicehealth.googleapis.com:8000" + ) + + 
+@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_service_health_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServiceHealthClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServiceHealthClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_events._session + session2 = client2.transport.list_events._session + assert session1 != session2 + session1 = client1.transport.get_event._session + session2 = client2.transport.get_event._session + assert session1 != session2 + session1 = client1.transport.list_organization_events._session + session2 = client2.transport.list_organization_events._session + assert session1 != session2 + session1 = client1.transport.get_organization_event._session + session2 = client2.transport.get_organization_event._session + assert session1 != session2 + session1 = client1.transport.list_organization_impacts._session + session2 = client2.transport.list_organization_impacts._session + assert session1 != session2 + session1 = client1.transport.get_organization_impact._session + session2 = client2.transport.get_organization_impact._session + assert session1 != session2 + + +def test_service_health_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServiceHealthGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_service_health_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ServiceHealthGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceHealthGrpcTransport, + transports.ServiceHealthGrpcAsyncIOTransport, + ], +) +def test_service_health_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# 
removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServiceHealthGrpcTransport, + transports.ServiceHealthGrpcAsyncIOTransport, + ], +) +def test_service_health_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_event_path(): + project = "squid" + location = "clam" + event = "whelk" + expected = "projects/{project}/locations/{location}/events/{event}".format( + project=project, + location=location, + event=event, + ) + actual = ServiceHealthClient.event_path(project, location, event) + assert expected == actual + + +def test_parse_event_path(): + expected = { + "project": "octopus", + "location": "oyster", + "event": "nudibranch", + } + path = ServiceHealthClient.event_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceHealthClient.parse_event_path(path) + assert expected == actual + + +def test_organization_event_path(): + organization = "cuttlefish" + location = "mussel" + event = "winkle" + expected = "organizations/{organization}/locations/{location}/organizationEvents/{event}".format( + organization=organization, + location=location, + event=event, + ) + actual = ServiceHealthClient.organization_event_path(organization, location, event) + assert expected == actual + + +def test_parse_organization_event_path(): + expected = { + "organization": "nautilus", + "location": "scallop", + "event": "abalone", + } + path = ServiceHealthClient.organization_event_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceHealthClient.parse_organization_event_path(path) + assert expected == actual + + +def test_organization_impact_path(): + organization = "squid" + location = "clam" + organization_impact = "whelk" + expected = "organizations/{organization}/locations/{location}/organizationImpacts/{organization_impact}".format( + organization=organization, + location=location, + organization_impact=organization_impact, + ) + actual = ServiceHealthClient.organization_impact_path( + organization, location, organization_impact + ) + assert expected == actual + + +def test_parse_organization_impact_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "organization_impact": "nudibranch", + } + path = ServiceHealthClient.organization_impact_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceHealthClient.parse_organization_impact_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ServiceHealthClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ServiceHealthClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceHealthClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ServiceHealthClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ServiceHealthClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceHealthClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ServiceHealthClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ServiceHealthClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceHealthClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ServiceHealthClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ServiceHealthClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceHealthClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ServiceHealthClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ServiceHealthClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceHealthClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ServiceHealthTransport, "_prep_wrapped_messages" + ) as prep: + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ServiceHealthTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ServiceHealthClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = ServiceHealthClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = ServiceHealthAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = ServiceHealthAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ServiceHealthClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ServiceHealthClient, transports.ServiceHealthGrpcTransport), + (ServiceHealthAsyncClient, transports.ServiceHealthGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + 
api_audience=None, + ) diff --git a/packages/google-cloud-tasks/CHANGELOG.md b/packages/google-cloud-tasks/CHANGELOG.md index 71770a962120..3e8b1ffc298a 100644 --- a/packages/google-cloud-tasks/CHANGELOG.md +++ b/packages/google-cloud-tasks/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-tasks/#history +## [2.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-tasks-v2.15.0...google-cloud-tasks-v2.15.1) (2024-01-19) + + +### Bug Fixes + +* [google-cloud-tasks] remove BufferTask method from beta libraries, which cannot call it ([23e91f5](https://github.com/googleapis/google-cloud-python/commit/23e91f57cb5b1dcd12245039e98dc8f233e51063)) + ## [2.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-tasks-v2.14.2...google-cloud-tasks-v2.15.0) (2023-12-07) diff --git a/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py index fe776d9cbfb8..141479f6303c 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py @@ -21,8 +21,6 @@ from .services.cloud_tasks import CloudTasksAsyncClient, CloudTasksClient from .types.cloudtasks import ( AcknowledgeTaskRequest, - BufferTaskRequest, - BufferTaskResponse, CancelLeaseRequest, CreateQueueRequest, CreateTaskRequest, @@ -69,8 +67,6 @@ "AppEngineHttpTarget", "AppEngineRouting", "AttemptStatus", - "BufferTaskRequest", - "BufferTaskResponse", "CancelLeaseRequest", "CloudTasksClient", "CreateQueueRequest", diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json index 7ea03b379c34..ba53ab530288 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json @@ -15,11 +15,6 @@ "acknowledge_task" ] }, - "BufferTask": { - "methods": [ - "buffer_task" - ] - }, "CancelLease": { "methods": [ "cancel_lease" @@ -130,11 +125,6 @@ "acknowledge_task" ] }, - "BufferTask": { - "methods": [ - "buffer_task" - ] - }, "CancelLease": { "methods": [ "cancel_lease" @@ -245,11 +235,6 @@ "acknowledge_task" ] }, - "BufferTask": { - "methods": [ - "buffer_task" - ] - }, "CancelLease": { "methods": [ "cancel_lease" diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py +++ 
b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py index 3d225848fcf8..22b8efbfd27f 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py @@ -42,7 +42,6 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore -from google.api import httpbody_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -2876,149 +2875,6 @@ async def sample_run_task(): # Done; return the response. return response - async def buffer_task( - self, - request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None, - *, - queue: Optional[str] = None, - task_id: Optional[str] = None, - body: Optional[httpbody_pb2.HttpBody] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.BufferTaskResponse: - r"""Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta2.HttpTarget]. 
To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import tasks_v2beta2 - - async def sample_buffer_task(): - # Create a client - client = tasks_v2beta2.CloudTasksAsyncClient() - - # Initialize request argument(s) - request = tasks_v2beta2.BufferTaskRequest( - queue="queue_value", - ) - - # Make the request - response = await client.buffer_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.tasks_v2beta2.types.BufferTaskRequest, dict]]): - The request object. LINT.IfChange Request message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - queue (:class:`str`): - Required. The parent queue name. For example: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` - - The queue must already exist. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (:class:`str`): - Optional. Task ID for the task being - created. If not provided, a random task - ID is assigned to the task. 
- - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - body (:class:`google.api.httpbody_pb2.HttpBody`): - Optional. Body of the HTTP request. - - The body can take any generic value. The value is - written to the [HttpRequest][payload] of the [Task]. - - This corresponds to the ``body`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.BufferTaskResponse: - Response message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, task_id, body]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloudtasks.BufferTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if task_id is not None: - request.task_id = task_id - if body is not None: - request.body = body - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.buffer_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("queue", request.queue), - ("task_id", request.task_id), - ) - ), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - async def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py index 3120b9e0aa65..fdd68b40454b 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py @@ -46,7 +46,6 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.api import httpbody_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -3044,149 +3043,6 @@ def sample_run_task(): # Done; return the response. return response - def buffer_task( - self, - request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None, - *, - queue: Optional[str] = None, - task_id: Optional[str] = None, - body: Optional[httpbody_pb2.HttpBody] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.BufferTaskResponse: - r"""Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta2.HttpTarget]. 
To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import tasks_v2beta2 - - def sample_buffer_task(): - # Create a client - client = tasks_v2beta2.CloudTasksClient() - - # Initialize request argument(s) - request = tasks_v2beta2.BufferTaskRequest( - queue="queue_value", - ) - - # Make the request - response = client.buffer_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.tasks_v2beta2.types.BufferTaskRequest, dict]): - The request object. LINT.IfChange Request message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - queue (str): - Required. The parent queue name. For example: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` - - The queue must already exist. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (str): - Optional. Task ID for the task being - created. If not provided, a random task - ID is assigned to the task. 
- - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - body (google.api.httpbody_pb2.HttpBody): - Optional. Body of the HTTP request. - - The body can take any generic value. The value is - written to the [HttpRequest][payload] of the [Task]. - - This corresponds to the ``body`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.BufferTaskResponse: - Response message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, task_id, body]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.BufferTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.BufferTaskRequest): - request = cloudtasks.BufferTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if task_id is not None: - request.task_id = task_id - if body is not None: - request.body = body - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.buffer_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("queue", request.queue), - ("task_id", request.task_id), - ) - ), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - def __enter__(self) -> "CloudTasksClient": return self diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py index f1f8bd036c79..4ec7359e142f 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py @@ -315,11 +315,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=20.0, client_info=client_info, ), - self.buffer_task: gapic_v1.method.wrap_method( - self.buffer_task, - default_timeout=20.0, - client_info=client_info, - ), } def close(self): @@ -510,15 +505,6 @@ def run_task( ) -> Callable[[cloudtasks.RunTaskRequest], Union[task.Task, Awaitable[task.Task]]]: raise NotImplementedError() - @property - def buffer_task( - self, - ) -> Callable[ - [cloudtasks.BufferTaskRequest], - Union[cloudtasks.BufferTaskResponse, Awaitable[cloudtasks.BufferTaskResponse]], - ]: - raise NotImplementedError() - @property def get_location( self, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py index 781246213eb3..ac655d733130 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py +++ 
b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py @@ -966,43 +966,6 @@ def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: ) return self._stubs["run_task"] - @property - def buffer_task( - self, - ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: - r"""Return a callable for the buffer task method over gRPC. - - Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta2.HttpTarget]. To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. - - Returns: - Callable[[~.BufferTaskRequest], - ~.BufferTaskResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "buffer_task" not in self._stubs: - self._stubs["buffer_task"] = self.grpc_channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/BufferTask", - request_serializer=cloudtasks.BufferTaskRequest.serialize, - response_deserializer=cloudtasks.BufferTaskResponse.deserialize, - ) - return self._stubs["buffer_task"] - def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py index ca3d162f0b5f..def297cbcef8 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py @@ -991,45 +991,6 @@ def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task] ) return self._stubs["run_task"] - @property - def buffer_task( - self, - ) -> Callable[ - [cloudtasks.BufferTaskRequest], Awaitable[cloudtasks.BufferTaskResponse] - ]: - r"""Return a callable for the buffer task method over gRPC. - - Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta2.HttpTarget]. To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. 
- - Returns: - Callable[[~.BufferTaskRequest], - Awaitable[~.BufferTaskResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "buffer_task" not in self._stubs: - self._stubs["buffer_task"] = self.grpc_channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/BufferTask", - request_serializer=cloudtasks.BufferTaskRequest.serialize, - response_deserializer=cloudtasks.BufferTaskResponse.deserialize, - ) - return self._stubs["buffer_task"] - def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py index a33234ad6b30..a7f3b709a223 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py @@ -76,14 +76,6 @@ def pre_acknowledge_task(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_buffer_task(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_buffer_task(self, response): - logging.log(f"Received response: {response}") - return response - def pre_cancel_lease(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -250,27 +242,6 @@ def pre_acknowledge_task( """ return request, metadata - def pre_buffer_task( - self, request: cloudtasks.BufferTaskRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[cloudtasks.BufferTaskRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for buffer_task - - Override in a subclass to 
manipulate the request or metadata - before they are sent to the CloudTasks server. - """ - return request, metadata - - def post_buffer_task( - self, response: cloudtasks.BufferTaskResponse - ) -> cloudtasks.BufferTaskResponse: - """Post-rpc interceptor for buffer_task - - Override in a subclass to manipulate the response - after it is returned by the CloudTasks server but before - it is returned to user code. - """ - return response - def pre_cancel_lease( self, request: cloudtasks.CancelLeaseRequest, @@ -868,104 +839,6 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _BufferTask(CloudTasksRestStub): - def __hash__(self): - return hash("BufferTask") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloudtasks.BufferTaskRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.BufferTaskResponse: - r"""Call the buffer task method over HTTP. - - Args: - request (~.cloudtasks.BufferTaskRequest): - The request object. LINT.IfChange Request message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudtasks.BufferTaskResponse: - Response message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v2beta2/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_buffer_task(request, metadata) - pb_request = cloudtasks.BufferTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudtasks.BufferTaskResponse() - pb_resp = cloudtasks.BufferTaskResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_buffer_task(resp) - return resp - class _CancelLease(CloudTasksRestStub): def __hash__(self): return hash("CancelLease") @@ -2927,14 +2800,6 @@ def acknowledge_task( # In C++ this would require a dynamic_cast return self._AcknowledgeTask(self._session, self._host, self._interceptor) # type: ignore - @property - def buffer_task( - self, - ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BufferTask(self._session, self._host, self._interceptor) # type: ignore - @property def cancel_lease(self) -> Callable[[cloudtasks.CancelLeaseRequest], task.Task]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py index 1c68ddb34d67..b23bf25655e7 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py @@ -15,8 +15,6 @@ # from .cloudtasks import ( AcknowledgeTaskRequest, - BufferTaskRequest, - BufferTaskResponse, CancelLeaseRequest, CreateQueueRequest, CreateTaskRequest, @@ -58,8 +56,6 @@ __all__ = ( "AcknowledgeTaskRequest", - "BufferTaskRequest", - "BufferTaskResponse", "CancelLeaseRequest", "CreateQueueRequest", "CreateTaskRequest", diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py index 8a2a69260a63..6766871bd8eb 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py @@ -50,8 +50,6 @@ "RenewLeaseRequest", "CancelLeaseRequest", "RunTaskRequest", - "BufferTaskRequest", - "BufferTaskResponse", }, ) @@ -906,56 +904,4 @@ class RunTaskRequest(proto.Message): ) -class BufferTaskRequest(proto.Message): - r"""LINT.IfChange Request message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - - Attributes: - queue (str): - Required. The parent queue name. For example: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` - - The queue must already exist. - task_id (str): - Optional. Task ID for the task being created. - If not provided, a random task ID is assigned to - the task. - body (google.api.httpbody_pb2.HttpBody): - Optional. Body of the HTTP request. - - The body can take any generic value. The value is written to - the [HttpRequest][payload] of the [Task]. 
- """ - - queue: str = proto.Field( - proto.STRING, - number=1, - ) - task_id: str = proto.Field( - proto.STRING, - number=2, - ) - body: httpbody_pb2.HttpBody = proto.Field( - proto.MESSAGE, - number=3, - message=httpbody_pb2.HttpBody, - ) - - -class BufferTaskResponse(proto.Message): - r"""Response message for - [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. - - Attributes: - task (google.cloud.tasks_v2beta2.types.Task): - The created task. - """ - - task: gct_task.Task = proto.Field( - proto.MESSAGE, - number=1, - message=gct_task.Task, - ) - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py index d66d7b53db1f..845bf4d4da9f 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py @@ -20,8 +20,6 @@ from .services.cloud_tasks import CloudTasksAsyncClient, CloudTasksClient from .types.cloudtasks import ( - BufferTaskRequest, - BufferTaskResponse, CreateQueueRequest, CreateTaskRequest, DeleteQueueRequest, @@ -67,8 +65,6 @@ "AppEngineHttpRequest", "AppEngineRouting", "Attempt", - "BufferTaskRequest", - "BufferTaskResponse", "CloudTasksClient", "CreateQueueRequest", "CreateTaskRequest", diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json index 13303a87bd1b..c4ba140a815b 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json @@ -10,11 +10,6 @@ "grpc": { "libraryClient": "CloudTasksClient", "rpcs": { - "BufferTask": { - "methods": [ - "buffer_task" - ] - }, "CreateQueue": { "methods": [ "create_queue" @@ -100,11 +95,6 @@ "grpc-async": { "libraryClient": "CloudTasksAsyncClient", "rpcs": { - 
"BufferTask": { - "methods": [ - "buffer_task" - ] - }, "CreateQueue": { "methods": [ "create_queue" @@ -190,11 +180,6 @@ "rest": { "libraryClient": "CloudTasksClient", "rpcs": { - "BufferTask": { - "methods": [ - "buffer_task" - ] - }, "CreateQueue": { "methods": [ "create_queue" diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py index e11c4435df97..031628830b4b 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py @@ -42,7 +42,6 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore -from google.api import httpbody_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -2262,149 +2261,6 @@ async def sample_run_task(): # Done; return the response. 
return response - async def buffer_task( - self, - request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None, - *, - queue: Optional[str] = None, - task_id: Optional[str] = None, - body: Optional[httpbody_pb2.HttpBody] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.BufferTaskResponse: - r"""Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta3.HttpTarget]. To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import tasks_v2beta3 - - async def sample_buffer_task(): - # Create a client - client = tasks_v2beta3.CloudTasksAsyncClient() - - # Initialize request argument(s) - request = tasks_v2beta3.BufferTaskRequest( - queue="queue_value", - ) - - # Make the request - response = await client.buffer_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.tasks_v2beta3.types.BufferTaskRequest, dict]]): - The request object. Request message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - queue (:class:`str`): - Required. The parent queue name. For example: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` - - The queue must already exist. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (:class:`str`): - Optional. Task ID for the task being - created. If not provided, a random task - ID is assigned to the task. - - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - body (:class:`google.api.httpbody_pb2.HttpBody`): - Optional. Body of the HTTP request. - - The body can take any generic value. The value is - written to the [HttpRequest][payload] of the [Task]. - - This corresponds to the ``body`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.tasks_v2beta3.types.BufferTaskResponse: - Response message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, task_id, body]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloudtasks.BufferTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if task_id is not None: - request.task_id = task_id - if body is not None: - request.body = body - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.buffer_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("queue", request.queue), - ("task_id", request.task_id), - ) - ), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - async def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py index 568dfe69a331..9322c353bf95 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py @@ -46,7 +46,6 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.api import httpbody_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -2429,149 +2428,6 @@ def sample_run_task(): # Done; return the response. return response - def buffer_task( - self, - request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None, - *, - queue: Optional[str] = None, - task_id: Optional[str] = None, - body: Optional[httpbody_pb2.HttpBody] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.BufferTaskResponse: - r"""Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta3.HttpTarget]. To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. 
You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import tasks_v2beta3 - - def sample_buffer_task(): - # Create a client - client = tasks_v2beta3.CloudTasksClient() - - # Initialize request argument(s) - request = tasks_v2beta3.BufferTaskRequest( - queue="queue_value", - ) - - # Make the request - response = client.buffer_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.tasks_v2beta3.types.BufferTaskRequest, dict]): - The request object. Request message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - queue (str): - Required. The parent queue name. For example: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` - - The queue must already exist. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task_id (str): - Optional. Task ID for the task being - created. If not provided, a random task - ID is assigned to the task. - - This corresponds to the ``task_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - body (google.api.httpbody_pb2.HttpBody): - Optional. Body of the HTTP request. - - The body can take any generic value. The value is - written to the [HttpRequest][payload] of the [Task]. - - This corresponds to the ``body`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.BufferTaskResponse: - Response message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, task_id, body]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.BufferTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.BufferTaskRequest): - request = cloudtasks.BufferTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if task_id is not None: - request.task_id = task_id - if body is not None: - request.body = body - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.buffer_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("queue", request.queue), - ("task_id", request.task_id), - ) - ), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - def __enter__(self) -> "CloudTasksClient": return self diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py index cd7b0f657c98..bbac835d46b5 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py @@ -290,11 +290,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=20.0, client_info=client_info, ), - self.buffer_task: gapic_v1.method.wrap_method( - self.buffer_task, - default_timeout=20.0, - client_info=client_info, - ), } def close(self): @@ -442,15 +437,6 @@ def run_task( ) -> Callable[[cloudtasks.RunTaskRequest], Union[task.Task, Awaitable[task.Task]]]: raise NotImplementedError() - @property - def buffer_task( - self, - ) -> Callable[ - [cloudtasks.BufferTaskRequest], - Union[cloudtasks.BufferTaskResponse, Awaitable[cloudtasks.BufferTaskResponse]], - ]: - raise NotImplementedError() - @property def get_location( self, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py index 22ae3cf0f0a1..09c8e501af47 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py @@ -777,43 +777,6 @@ def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: ) return self._stubs["run_task"] - @property - def buffer_task( - self, - ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: - r"""Return a callable for the buffer task method over gRPC. 
- - Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta3.HttpTarget]. To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. - - Returns: - Callable[[~.BufferTaskRequest], - ~.BufferTaskResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "buffer_task" not in self._stubs: - self._stubs["buffer_task"] = self.grpc_channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/BufferTask", - request_serializer=cloudtasks.BufferTaskRequest.serialize, - response_deserializer=cloudtasks.BufferTaskResponse.deserialize, - ) - return self._stubs["buffer_task"] - def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py index c138c66ec81b..21444c26a645 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py @@ -796,45 +796,6 @@ def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task] ) return self._stubs["run_task"] - @property - def buffer_task( - self, - ) -> Callable[ - [cloudtasks.BufferTaskRequest], Awaitable[cloudtasks.BufferTaskResponse] - ]: - r"""Return a callable for the buffer task method over gRPC. - - Creates and buffers a new task without the need to explicitly - define a Task message. The queue must have [HTTP - target][google.cloud.tasks.v2beta3.HttpTarget]. To create the - task with a custom ID, use the following format and set TASK_ID - to your desired ID: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer - To create the task with an automatically generated ID, use the - following format: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. - Note: This feature is in its experimental stage. You must - request access to the API through the `Cloud Tasks BufferTask - Experiment Signup form `__. 
- - Returns: - Callable[[~.BufferTaskRequest], - Awaitable[~.BufferTaskResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "buffer_task" not in self._stubs: - self._stubs["buffer_task"] = self.grpc_channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/BufferTask", - request_serializer=cloudtasks.BufferTaskRequest.serialize, - response_deserializer=cloudtasks.BufferTaskResponse.deserialize, - ) - return self._stubs["buffer_task"] - def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py index ef95cfc63459..c20e5e93f905 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py @@ -72,14 +72,6 @@ class CloudTasksRestInterceptor: .. 
code-block:: python class MyCustomCloudTasksInterceptor(CloudTasksRestInterceptor): - def pre_buffer_task(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_buffer_task(self, response): - logging.log(f"Received response: {response}") - return response - def pre_create_queue(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -206,27 +198,6 @@ def post_update_queue(self, response): """ - def pre_buffer_task( - self, request: cloudtasks.BufferTaskRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[cloudtasks.BufferTaskRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for buffer_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudTasks server. - """ - return request, metadata - - def post_buffer_task( - self, response: cloudtasks.BufferTaskResponse - ) -> cloudtasks.BufferTaskResponse: - """Post-rpc interceptor for buffer_task - - Override in a subclass to manipulate the response - after it is returned by the CloudTasks server but before - it is returned to user code. 
- """ - return response - def pre_create_queue( self, request: cloudtasks.CreateQueueRequest, @@ -677,104 +648,6 @@ def __init__( self._interceptor = interceptor or CloudTasksRestInterceptor() self._prep_wrapped_messages(client_info) - class _BufferTask(CloudTasksRestStub): - def __hash__(self): - return hash("BufferTask") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloudtasks.BufferTaskRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.BufferTaskResponse: - r"""Call the buffer task method over HTTP. - - Args: - request (~.cloudtasks.BufferTaskRequest): - The request object. Request message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudtasks.BufferTaskResponse: - Response message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v2beta3/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_buffer_task(request, metadata) - pb_request = cloudtasks.BufferTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudtasks.BufferTaskResponse() - pb_resp = cloudtasks.BufferTaskResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_buffer_task(resp) - return resp - class _CreateQueue(CloudTasksRestStub): def __hash__(self): return hash("CreateQueue") @@ -2422,14 +2295,6 @@ def __call__( resp = self._interceptor.post_update_queue(resp) return resp - @property - def buffer_task( - self, - ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BufferTask(self._session, self._host, self._interceptor) # type: ignore - @property def create_queue( self, diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py index 77d5ff35fbc2..119581334cbc 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py @@ -14,8 +14,6 @@ # limitations under the License. 
# from .cloudtasks import ( - BufferTaskRequest, - BufferTaskResponse, CreateQueueRequest, CreateTaskRequest, DeleteQueueRequest, @@ -50,8 +48,6 @@ from .task import Attempt, Task __all__ = ( - "BufferTaskRequest", - "BufferTaskResponse", "CreateQueueRequest", "CreateTaskRequest", "DeleteQueueRequest", diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py index 04169d15e969..6fe514acde94 100644 --- a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py @@ -17,7 +17,6 @@ from typing import MutableMapping, MutableSequence -from google.api import httpbody_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore import proto # type: ignore @@ -42,8 +41,6 @@ "CreateTaskRequest", "DeleteTaskRequest", "RunTaskRequest", - "BufferTaskRequest", - "BufferTaskResponse", }, ) @@ -582,56 +579,4 @@ class RunTaskRequest(proto.Message): ) -class BufferTaskRequest(proto.Message): - r"""Request message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - - Attributes: - queue (str): - Required. The parent queue name. For example: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` - - The queue must already exist. - task_id (str): - Optional. Task ID for the task being created. - If not provided, a random task ID is assigned to - the task. - body (google.api.httpbody_pb2.HttpBody): - Optional. Body of the HTTP request. - - The body can take any generic value. The value is written to - the [HttpRequest][payload] of the [Task]. 
- """ - - queue: str = proto.Field( - proto.STRING, - number=1, - ) - task_id: str = proto.Field( - proto.STRING, - number=2, - ) - body: httpbody_pb2.HttpBody = proto.Field( - proto.MESSAGE, - number=3, - message=httpbody_pb2.HttpBody, - ) - - -class BufferTaskResponse(proto.Message): - r"""Response message for - [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. - - Attributes: - task (google.cloud.tasks_v2beta3.types.Task): - The created task. - """ - - task: gct_task.Task = proto.Field( - proto.MESSAGE, - number=1, - message=gct_task.Task, - ) - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/noxfile.py b/packages/google-cloud-tasks/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-tasks/noxfile.py +++ b/packages/google-cloud-tasks/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json index 43850228e47d..cd4c6cbd1588 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json +++ b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tasks", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json index ac2a91f418b2..e46a2c49b733 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json +++ b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-tasks", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ { @@ -174,183 +174,6 @@ ], "title": "cloudtasks_v2beta2_generated_cloud_tasks_acknowledge_task_sync.py" }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.tasks_v2beta2.CloudTasksAsyncClient", - "shortName": "CloudTasksAsyncClient" - }, - "fullName": "google.cloud.tasks_v2beta2.CloudTasksAsyncClient.buffer_task", - "method": { - "fullName": "google.cloud.tasks.v2beta2.CloudTasks.BufferTask", - "service": { - "fullName": "google.cloud.tasks.v2beta2.CloudTasks", - "shortName": "CloudTasks" - }, - 
"shortName": "BufferTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.tasks_v2beta2.types.BufferTaskRequest" - }, - { - "name": "queue", - "type": "str" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "body", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.tasks_v2beta2.types.BufferTaskResponse", - "shortName": "buffer_task" - }, - "description": "Sample for BufferTask", - "file": "cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudtasks_v2beta2_generated_CloudTasks_BufferTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.tasks_v2beta2.CloudTasksClient", - "shortName": "CloudTasksClient" - }, - "fullName": "google.cloud.tasks_v2beta2.CloudTasksClient.buffer_task", - "method": { - "fullName": "google.cloud.tasks.v2beta2.CloudTasks.BufferTask", - "service": { - "fullName": "google.cloud.tasks.v2beta2.CloudTasks", - "shortName": "CloudTasks" - }, - "shortName": "BufferTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.tasks_v2beta2.types.BufferTaskRequest" - }, - { - "name": "queue", - "type": "str" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "body", 
- "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.tasks_v2beta2.types.BufferTaskResponse", - "shortName": "buffer_task" - }, - "description": "Sample for BufferTask", - "file": "cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudtasks_v2beta2_generated_CloudTasks_BufferTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudtasks_v2beta2_generated_cloud_tasks_buffer_task_sync.py" - }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json index c0fc479ac9b2..440658e6f4aa 100644 --- a/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json +++ b/packages/google-cloud-tasks/samples/generated_samples/snippet_metadata_google.cloud.tasks.v2beta3.json @@ -8,186 +8,9 @@ ], "language": "PYTHON", "name": "google-cloud-tasks", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.tasks_v2beta3.CloudTasksAsyncClient", - "shortName": "CloudTasksAsyncClient" - }, - "fullName": "google.cloud.tasks_v2beta3.CloudTasksAsyncClient.buffer_task", - "method": 
{ - "fullName": "google.cloud.tasks.v2beta3.CloudTasks.BufferTask", - "service": { - "fullName": "google.cloud.tasks.v2beta3.CloudTasks", - "shortName": "CloudTasks" - }, - "shortName": "BufferTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.tasks_v2beta3.types.BufferTaskRequest" - }, - { - "name": "queue", - "type": "str" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "body", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.tasks_v2beta3.types.BufferTaskResponse", - "shortName": "buffer_task" - }, - "description": "Sample for BufferTask", - "file": "cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudtasks_v2beta3_generated_CloudTasks_BufferTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.tasks_v2beta3.CloudTasksClient", - "shortName": "CloudTasksClient" - }, - "fullName": "google.cloud.tasks_v2beta3.CloudTasksClient.buffer_task", - "method": { - "fullName": "google.cloud.tasks.v2beta3.CloudTasks.BufferTask", - "service": { - "fullName": "google.cloud.tasks.v2beta3.CloudTasks", - "shortName": "CloudTasks" - }, - "shortName": "BufferTask" - }, - "parameters": [ - { - "name": "request", - 
"type": "google.cloud.tasks_v2beta3.types.BufferTaskRequest" - }, - { - "name": "queue", - "type": "str" - }, - { - "name": "task_id", - "type": "str" - }, - { - "name": "body", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.tasks_v2beta3.types.BufferTaskResponse", - "shortName": "buffer_task" - }, - "description": "Sample for BufferTask", - "file": "cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudtasks_v2beta3_generated_CloudTasks_BufferTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudtasks_v2beta3_generated_cloud_tasks_buffer_task_sync.py" - }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py index a585e51b63f7..7f5ee1ba3356 100644 --- a/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py +++ b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py @@ -40,7 +40,6 @@ class tasksCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'acknowledge_task': ('name', 'schedule_time', ), - 'buffer_task': ('queue', 'task_id', 'body', ), 'cancel_lease': ('name', 'schedule_time', 'response_view', ), 'create_queue': ('parent', 'queue', ), 
'create_task': ('parent', 'task', 'response_view', ), diff --git a/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py index d08dd32eaad6..815c58e3be93 100644 --- a/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py +++ b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py @@ -39,7 +39,6 @@ def partition( class tasksCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'buffer_task': ('queue', 'task_id', 'body', ), 'create_queue': ('parent', 'queue', ), 'create_task': ('parent', 'task', 'response_view', ), 'delete_queue': ('name', ), diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py index 372d7d753840..985f1953958c 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py @@ -5919,254 +5919,6 @@ async def test_run_task_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - cloudtasks.BufferTaskRequest, - dict, - ], -) -def test_buffer_task(request_type, transport: str = "grpc"): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudtasks.BufferTaskResponse() - response = client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.BufferTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.BufferTaskResponse) - - -def test_buffer_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - client.buffer_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.BufferTaskRequest() - - -@pytest.mark.asyncio -async def test_buffer_task_async( - transport: str = "grpc_asyncio", request_type=cloudtasks.BufferTaskRequest -): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudtasks.BufferTaskResponse() - ) - response = await client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.BufferTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.BufferTaskResponse) - - -@pytest.mark.asyncio -async def test_buffer_task_async_from_dict(): - await test_buffer_task_async(request_type=dict) - - -def test_buffer_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.BufferTaskRequest() - - request.queue = "queue_value" - request.task_id = "task_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - call.return_value = cloudtasks.BufferTaskResponse() - client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "queue=queue_value&task_id=task_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_buffer_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.BufferTaskRequest() - - request.queue = "queue_value" - request.task_id = "task_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudtasks.BufferTaskResponse() - ) - await client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "queue=queue_value&task_id=task_id_value", - ) in kw["metadata"] - - -def test_buffer_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.BufferTaskResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.buffer_task( - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].queue - mock_val = "queue_value" - assert arg == mock_val - arg = args[0].task_id - mock_val = "task_id_value" - assert arg == mock_val - arg = args[0].body - mock_val = httpbody_pb2.HttpBody(content_type="content_type_value") - assert arg == mock_val - - -def test_buffer_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.buffer_task( - cloudtasks.BufferTaskRequest(), - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - -@pytest.mark.asyncio -async def test_buffer_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.BufferTaskResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudtasks.BufferTaskResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.buffer_task( - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].queue - mock_val = "queue_value" - assert arg == mock_val - arg = args[0].task_id - mock_val = "task_id_value" - assert arg == mock_val - arg = args[0].body - mock_val = httpbody_pb2.HttpBody(content_type="content_type_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_buffer_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.buffer_task( - cloudtasks.BufferTaskRequest(), - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - @pytest.mark.parametrize( "request_type", [ @@ -11998,284 +11750,6 @@ def test_run_task_rest_error(): ) -@pytest.mark.parametrize( - "request_type", - [ - cloudtasks.BufferTaskRequest, - dict, - ], -) -def test_buffer_task_rest(request_type): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "queue": "projects/sample1/locations/sample2/queues/sample3", - "task_id": "sample4", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloudtasks.BufferTaskResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloudtasks.BufferTaskResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.buffer_task(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudtasks.BufferTaskResponse) - - -def test_buffer_task_rest_required_fields(request_type=cloudtasks.BufferTaskRequest): - transport_class = transports.CloudTasksRestTransport - - request_init = {} - request_init["queue"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).buffer_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["queue"] = "queue_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).buffer_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "queue" in jsonified_request - assert jsonified_request["queue"] == "queue_value" - - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudtasks.BufferTaskResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloudtasks.BufferTaskResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.buffer_task(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_buffer_task_rest_unset_required_fields(): - transport = transports.CloudTasksRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.buffer_task._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("queue",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_buffer_task_rest_interceptors(null_interceptor): - transport = transports.CloudTasksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudTasksRestInterceptor(), - ) - client = CloudTasksClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.CloudTasksRestInterceptor, "post_buffer_task" - ) as post, mock.patch.object( - transports.CloudTasksRestInterceptor, "pre_buffer_task" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudtasks.BufferTaskRequest.pb(cloudtasks.BufferTaskRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, 
- } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudtasks.BufferTaskResponse.to_json( - cloudtasks.BufferTaskResponse() - ) - - request = cloudtasks.BufferTaskRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudtasks.BufferTaskResponse() - - client.buffer_task( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_buffer_task_rest_bad_request( - transport: str = "rest", request_type=cloudtasks.BufferTaskRequest -): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "queue": "projects/sample1/locations/sample2/queues/sample3", - "task_id": "sample4", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.buffer_task(request) - - -def test_buffer_task_rest_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudtasks.BufferTaskResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "queue": "projects/sample1/locations/sample2/queues/sample3", - "task_id": "sample4", - } - - # get truthy value for each flattened field - mock_args = dict( - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloudtasks.BufferTaskResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.buffer_task(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2beta2/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer" - % client.transport._host, - args[1], - ) - - -def test_buffer_task_rest_flattened_error(transport: str = "rest"): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.buffer_task( - cloudtasks.BufferTaskRequest(), - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - -def test_buffer_task_rest_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_upload_queue_yaml_rest_error(): client = CloudTasksClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12449,7 +11923,6 @@ def test_cloud_tasks_base_transport(): "renew_lease", "cancel_lease", "run_task", - "buffer_task", "get_location", "list_locations", ) @@ -12770,9 +12243,6 @@ def test_cloud_tasks_client_transport_session_collision(transport_name): session1 = client1.transport.run_task._session session2 = client2.transport.run_task._session assert session1 != session2 - session1 = client1.transport.buffer_task._session - session2 = client2.transport.buffer_task._session - assert session1 != session2 def test_cloud_tasks_grpc_transport_channel(): diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py index 4ac80ca80dd3..5c54ac41d9d1 100644 --- a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -26,7 +26,6 @@ import json import math -from google.api import httpbody_pb2 # type: ignore from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -4920,254 +4919,6 @@ async def test_run_task_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - cloudtasks.BufferTaskRequest, - dict, - ], -) -def test_buffer_task(request_type, transport: str = "grpc"): - client = CloudTasksClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.BufferTaskResponse() - response = client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.BufferTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.BufferTaskResponse) - - -def test_buffer_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - client.buffer_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.BufferTaskRequest() - - -@pytest.mark.asyncio -async def test_buffer_task_async( - transport: str = "grpc_asyncio", request_type=cloudtasks.BufferTaskRequest -): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudtasks.BufferTaskResponse() - ) - response = await client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.BufferTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.BufferTaskResponse) - - -@pytest.mark.asyncio -async def test_buffer_task_async_from_dict(): - await test_buffer_task_async(request_type=dict) - - -def test_buffer_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.BufferTaskRequest() - - request.queue = "queue_value" - request.task_id = "task_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - call.return_value = cloudtasks.BufferTaskResponse() - client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "queue=queue_value&task_id=task_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_buffer_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.BufferTaskRequest() - - request.queue = "queue_value" - request.task_id = "task_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudtasks.BufferTaskResponse() - ) - await client.buffer_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "queue=queue_value&task_id=task_id_value", - ) in kw["metadata"] - - -def test_buffer_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.BufferTaskResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.buffer_task( - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].queue - mock_val = "queue_value" - assert arg == mock_val - arg = args[0].task_id - mock_val = "task_id_value" - assert arg == mock_val - arg = args[0].body - mock_val = httpbody_pb2.HttpBody(content_type="content_type_value") - assert arg == mock_val - - -def test_buffer_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.buffer_task( - cloudtasks.BufferTaskRequest(), - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - -@pytest.mark.asyncio -async def test_buffer_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.BufferTaskResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudtasks.BufferTaskResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.buffer_task( - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].queue - mock_val = "queue_value" - assert arg == mock_val - arg = args[0].task_id - mock_val = "task_id_value" - assert arg == mock_val - arg = args[0].body - mock_val = httpbody_pb2.HttpBody(content_type="content_type_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_buffer_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.buffer_task( - cloudtasks.BufferTaskRequest(), - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - @pytest.mark.parametrize( "request_type", [ @@ -9897,284 +9648,6 @@ def test_run_task_rest_error(): ) -@pytest.mark.parametrize( - "request_type", - [ - cloudtasks.BufferTaskRequest, - dict, - ], -) -def test_buffer_task_rest(request_type): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "queue": "projects/sample1/locations/sample2/queues/sample3", - "task_id": "sample4", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudtasks.BufferTaskResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloudtasks.BufferTaskResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.buffer_task(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.BufferTaskResponse) - - -def test_buffer_task_rest_required_fields(request_type=cloudtasks.BufferTaskRequest): - transport_class = transports.CloudTasksRestTransport - - request_init = {} - request_init["queue"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).buffer_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["queue"] = "queue_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).buffer_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "queue" in jsonified_request - assert jsonified_request["queue"] == "queue_value" - - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = cloudtasks.BufferTaskResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = cloudtasks.BufferTaskResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.buffer_task(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_buffer_task_rest_unset_required_fields(): - transport = transports.CloudTasksRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.buffer_task._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("queue",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_buffer_task_rest_interceptors(null_interceptor): - transport = transports.CloudTasksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudTasksRestInterceptor(), - ) - client = CloudTasksClient(transport=transport) - with mock.patch.object( - 
type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.CloudTasksRestInterceptor, "post_buffer_task" - ) as post, mock.patch.object( - transports.CloudTasksRestInterceptor, "pre_buffer_task" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudtasks.BufferTaskRequest.pb(cloudtasks.BufferTaskRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudtasks.BufferTaskResponse.to_json( - cloudtasks.BufferTaskResponse() - ) - - request = cloudtasks.BufferTaskRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudtasks.BufferTaskResponse() - - client.buffer_task( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_buffer_task_rest_bad_request( - transport: str = "rest", request_type=cloudtasks.BufferTaskRequest -): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "queue": "projects/sample1/locations/sample2/queues/sample3", - "task_id": "sample4", - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.buffer_task(request) - - -def test_buffer_task_rest_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloudtasks.BufferTaskResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "queue": "projects/sample1/locations/sample2/queues/sample3", - "task_id": "sample4", - } - - # get truthy value for each flattened field - mock_args = dict( - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloudtasks.BufferTaskResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.buffer_task(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2beta3/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer" - % client.transport._host, - args[1], - ) - - -def test_buffer_task_rest_flattened_error(transport: str = "rest"): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.buffer_task( - cloudtasks.BufferTaskRequest(), - queue="queue_value", - task_id="task_id_value", - body=httpbody_pb2.HttpBody(content_type="content_type_value"), - ) - - -def test_buffer_task_rest_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudTasksGrpcTransport( @@ -10330,7 +9803,6 @@ def test_cloud_tasks_base_transport(): "create_task", "delete_task", "run_task", - "buffer_task", "get_location", "list_locations", ) @@ -10636,9 +10108,6 @@ def test_cloud_tasks_client_transport_session_collision(transport_name): session1 = client1.transport.run_task._session session2 = client2.transport.run_task._session assert session1 != session2 - session1 = client1.transport.buffer_task._session - session2 = client2.transport.buffer_task._session - assert session1 != session2 def test_cloud_tasks_grpc_transport_channel(): diff --git a/packages/google-cloud-texttospeech/CHANGELOG.md b/packages/google-cloud-texttospeech/CHANGELOG.md index 65bee2e7559b..c8c7df62c524 100644 --- a/packages/google-cloud-texttospeech/CHANGELOG.md +++ b/packages/google-cloud-texttospeech/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-texttospeech/#history +## 
[2.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.15.0...google-cloud-texttospeech-v2.15.1) (2024-01-04) + + +### Bug Fixes + +* correct long audio synthesis HTTP binding ([a748565](https://github.com/googleapis/google-cloud-python/commit/a7485655fe09ecd85198c383fe818acd1ef677bc)) + + +### Documentation + +* Deprecate the custom voice usage field ([a748565](https://github.com/googleapis/google-cloud-python/commit/a7485655fe09ecd85198c383fe818acd1ef677bc)) + ## [2.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.14.2...google-cloud-texttospeech-v2.15.0) (2023-12-07) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py index 70b335ed53e8..f1bdd96a8baa 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py @@ -441,6 +441,114 @@ async def sample_synthesize_speech(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "TextToSpeechAsyncClient": return self diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py index 8fcfc6b4de35..8d4ca74e90fc 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py @@ -688,6 +688,114 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py index 5b4fc7f865e0..3d3249202edf 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py @@ -165,6 +165,27 @@ def synthesize_speech( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py index c2642c2ac8ab..031eb1c843be 100644 --- 
a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py @@ -287,6 +287,42 @@ def synthesize_speech( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py index ae4e13695141..94d9db074b46 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py @@ -293,5 +293,41 @@ def synthesize_speech( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("TextToSpeechGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py index c24a1e4ef72f..6dd6deb75a17 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py @@ -131,6 +131,52 @@ def post_synthesize_speech( """ return response + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request 
or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class TextToSpeechRestStub: @@ -427,6 +473,138 @@ def synthesize_speech( # In C++ this would require a dynamic_cast return self._SynthesizeSpeech(self._session, self._host, self._interceptor) # type: ignore + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TextToSpeechRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(TextToSpeechRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py index a422abdee272..3fa08f438757 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py @@ -339,6 +339,114 @@ async def sample_synthesize_long_audio(): # Done; return the response. 
return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "TextToSpeechLongAudioSynthesizeAsyncClient": return self diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py index 94c606df3dba..1fd67a45319e 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py @@ -580,6 +580,114 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py index 7f7afe86ce28..5e57258e151f 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py @@ -153,6 +153,27 @@ def synthesize_long_audio( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py index af0c0a1c4a75..009848d54423 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py @@ 
-275,6 +275,42 @@ def synthesize_long_audio( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py index fcf193c4d040..e08db447c850 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py @@ -282,5 +282,41 @@ def synthesize_long_audio( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py index 541e8ddfda69..adfc026d1987 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py @@ -108,6 +108,52 @@ def post_synthesize_long_audio( """ return response + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, 
Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeechLongAudioSynthesize server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeechLongAudioSynthesize server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeechLongAudioSynthesize server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeechLongAudioSynthesize server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class TextToSpeechLongAudioSynthesizeRestStub: @@ -226,7 +272,20 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: """ # Only create a new client if we do not already have one. 
if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = {} + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } rest_transport = operations_v1.OperationsRestTransport( host=self._host, @@ -289,7 +348,7 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*/voices/*}:SynthesizeLongAudio", + "uri": "/v1/{parent=projects/*/locations/*}:synthesizeLongAudio", "body": "*", }, ] @@ -349,6 +408,138 @@ def synthesize_long_audio( # In C++ this would require a dynamic_cast return self._SynthesizeLongAudio(self._session, self._host, self._interceptor) # type: ignore + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TextToSpeechLongAudioSynthesizeRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(TextToSpeechLongAudioSynthesizeRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py index 82cd02c11168..da14f1fd62e2 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py @@ -391,14 +391,13 @@ class CustomVoiceParams(proto.Message): Required. The name of the AutoML model that synthesizes the custom voice. 
reported_usage (google.cloud.texttospeech_v1.types.CustomVoiceParams.ReportedUsage): - Optional. The usage of the synthesized audio - to be reported. + Optional. Deprecated. The usage of the + synthesized audio to be reported. """ class ReportedUsage(proto.Enum): - r"""The usage of the synthesized audio. You must report your - honest and correct usage of the service as it's regulated by - contract and will cause significant difference in billing. + r"""Deprecated. The usage of the synthesized audio. Usage does + not affect billing. Values: REPORTED_USAGE_UNSPECIFIED (0): diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py index b23db84f3acf..c83a5189c205 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py @@ -97,7 +97,7 @@ class SynthesizeLongAudioMetadata(proto.Message): start_time (google.protobuf.timestamp_pb2.Timestamp): Time when the request was received. last_update_time (google.protobuf.timestamp_pb2.Timestamp): - Time of the most recent processing update. + Deprecated. Do not use. progress_percentage (float): The progress of the most recent processing update in percentage, ie. 70.0%. diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 209de14827c9..b3854bf4a586 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py index 32446bd0e256..f83593680190 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py @@ -441,6 +441,114 @@ async def sample_synthesize_speech(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "TextToSpeechAsyncClient": return self diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py index 18deb40699d7..3b6354e46e2d 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py @@ -688,6 +688,114 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py index 8cac0d61316b..25b7f43738cf 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py @@ -165,6 +165,27 @@ def synthesize_speech( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py index cfd0258047b3..785977d6e922 100644 --- 
a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py @@ -287,6 +287,42 @@ def synthesize_speech( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py index 086d094b33f1..40fc058114bb 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py @@ -293,5 +293,41 @@ def synthesize_speech( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("TextToSpeechGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py index cf1f517b709e..e8a85912cc5f 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py @@ -131,6 +131,52 @@ def post_synthesize_speech( """ return response + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to 
manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class TextToSpeechRestStub: @@ -427,6 +473,138 @@ def synthesize_speech( # In C++ this would require a dynamic_cast return self._SynthesizeSpeech(self._session, self._host, self._interceptor) # type: ignore + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TextToSpeechRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(TextToSpeechRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py index 18e973db0396..b4ab5f6ed08e 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py @@ -339,6 +339,114 @@ async def sample_synthesize_long_audio(): # Done; return the response. 
return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "TextToSpeechLongAudioSynthesizeAsyncClient": return self diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py index 4a55924e1749..4d39c29ea23e 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py @@ -580,6 +580,114 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py index 70bfcfe8fd09..ae0837de9161 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py @@ -153,6 +153,27 @@ def synthesize_long_audio( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py index 876b080a9876..647c6e2ef264 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py +++ 
b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py @@ -275,6 +275,42 @@ def synthesize_long_audio( def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py index 040c809e0f32..8d0739949285 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py @@ -282,5 +282,41 @@ def synthesize_long_audio( def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ("TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py index 86ecb24a5e07..1093cd749f7b 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py @@ -108,6 +108,52 @@ def post_synthesize_long_audio( """ return response + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> 
Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeechLongAudioSynthesize server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeechLongAudioSynthesize server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeechLongAudioSynthesize server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeechLongAudioSynthesize server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class TextToSpeechLongAudioSynthesizeRestStub: @@ -226,7 +272,20 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: """ # Only create a new client if we do not already have one. 
if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = {} + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}/operations", + }, + ], + } rest_transport = operations_v1.OperationsRestTransport( host=self._host, @@ -289,7 +348,7 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1beta1/{parent=projects/*/locations/*/voices/*}:SynthesizeLongAudio", + "uri": "/v1beta1/{parent=projects/*/locations/*}:synthesizeLongAudio", "body": "*", }, ] @@ -349,6 +408,138 @@ def synthesize_long_audio( # In C++ this would require a dynamic_cast return self._SynthesizeLongAudio(self._session, self._host, self._interceptor) # type: ignore + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TextToSpeechLongAudioSynthesizeRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(TextToSpeechLongAudioSynthesizeRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py index 7c001188c90e..c13da582e026 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py @@ -418,14 +418,13 @@ class CustomVoiceParams(proto.Message): Required. 
The name of the AutoML model that synthesizes the custom voice. reported_usage (google.cloud.texttospeech_v1beta1.types.CustomVoiceParams.ReportedUsage): - Optional. The usage of the synthesized audio - to be reported. + Optional. Deprecated. The usage of the + synthesized audio to be reported. """ class ReportedUsage(proto.Enum): - r"""The usage of the synthesized audio. You must report your - honest and correct usage of the service as it's regulated by - contract and will cause significant difference in billing. + r"""Deprecated. The usage of the synthesized audio. Usage does + not affect billing. Values: REPORTED_USAGE_UNSPECIFIED (0): diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py index f5a2289e26e1..1b44e520d84a 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py @@ -97,7 +97,7 @@ class SynthesizeLongAudioMetadata(proto.Message): start_time (google.protobuf.timestamp_pb2.Timestamp): Time when the request was received. last_update_time (google.protobuf.timestamp_pb2.Timestamp): - Time of the most recent processing update. + Deprecated. Do not use. progress_percentage (float): The progress of the most recent processing update in percentage, ie. 70.0%. 
diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index 3374ba0c531d..0cea6b6df8aa 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index 328600954882..538678629a5b 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py index 41246a36955d..1b7602e01a24 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py @@ -1665,6 +1665,8 @@ def test_text_to_speech_base_transport(): methods = ( "list_voices", "synthesize_speech", + "get_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2212,6 
+2214,412 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py index c531cca2c9c2..6ae6f07675ad 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py @@ -944,7 +944,7 @@ def test_synthesize_long_audio_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1130,7 +1130,7 @@ def test_synthesize_long_audio_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1291,7 +1291,11 @@ def test_text_to_speech_long_audio_synthesize_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. - methods = ("synthesize_long_audio",) + methods = ( + "synthesize_long_audio", + "get_operation", + "list_operations", + ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -1923,6 +1927,412 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py index 07072a2e6929..867053a65c0a 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py @@ -1667,6 +1667,8 @@ def test_text_to_speech_base_transport(): methods = ( "list_voices", "synthesize_speech", + "get_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2214,6 +2216,412 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py index be0f0e433518..cc64688e7d25 100644 --- a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py @@ -944,7 +944,7 @@ def test_synthesize_long_audio_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -1130,7 +1130,7 @@ def test_synthesize_long_audio_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1291,7 +1291,11 @@ def test_text_to_speech_long_audio_synthesize_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
- methods = ("synthesize_long_audio",) + methods = ( + "synthesize_long_audio", + "get_operation", + "list_operations", + ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -1923,6 +1927,412 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/google-cloud-translate/CHANGELOG.md b/packages/google-cloud-translate/CHANGELOG.md index 3a9d8d279b30..f3e08ca0754e 100644 --- a/packages/google-cloud-translate/CHANGELOG.md +++ b/packages/google-cloud-translate/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-translate/#history +## [3.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-translate-v3.13.0...google-cloud-translate-v3.14.0) (2024-01-04) + + +### Features + +* Add Adaptive MT API ([e063fa9](https://github.com/googleapis/google-cloud-python/commit/e063fa9be2b01c694c14d4fdc52ca6ec98d685f3)) + + +### Documentation + +* Fixes a typo in docs ([e063fa9](https://github.com/googleapis/google-cloud-python/commit/e063fa9be2b01c694c14d4fdc52ca6ec98d685f3)) + ## [3.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-translate-v3.12.1...google-cloud-translate-v3.13.0) (2023-12-07) diff --git 
a/packages/google-cloud-translate/google/cloud/translate/__init__.py b/packages/google-cloud-translate/google/cloud/translate/__init__.py index ec5f62191712..c29d918b5c30 100644 --- a/packages/google-cloud-translate/google/cloud/translate/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate/__init__.py @@ -24,6 +24,32 @@ from google.cloud.translate_v3.services.translation_service.client import ( TranslationServiceClient, ) +from google.cloud.translate_v3.types.adaptive_mt import ( + AdaptiveMtDataset, + AdaptiveMtFile, + AdaptiveMtSentence, + AdaptiveMtTranslateRequest, + AdaptiveMtTranslateResponse, + AdaptiveMtTranslation, + CreateAdaptiveMtDatasetRequest, + DeleteAdaptiveMtDatasetRequest, + DeleteAdaptiveMtFileRequest, + GetAdaptiveMtDatasetRequest, + GetAdaptiveMtFileRequest, + ImportAdaptiveMtFileRequest, + ImportAdaptiveMtFileResponse, + ListAdaptiveMtDatasetsRequest, + ListAdaptiveMtDatasetsResponse, + ListAdaptiveMtFilesRequest, + ListAdaptiveMtFilesResponse, + ListAdaptiveMtSentencesRequest, + ListAdaptiveMtSentencesResponse, +) +from google.cloud.translate_v3.types.common import ( + FileInputSource, + GcsInputSource, + GcsOutputDestination, +) from google.cloud.translate_v3.types.translation_service import ( BatchDocumentInputConfig, BatchDocumentOutputConfig, @@ -67,6 +93,28 @@ __all__ = ( "TranslationServiceClient", "TranslationServiceAsyncClient", + "AdaptiveMtDataset", + "AdaptiveMtFile", + "AdaptiveMtSentence", + "AdaptiveMtTranslateRequest", + "AdaptiveMtTranslateResponse", + "AdaptiveMtTranslation", + "CreateAdaptiveMtDatasetRequest", + "DeleteAdaptiveMtDatasetRequest", + "DeleteAdaptiveMtFileRequest", + "GetAdaptiveMtDatasetRequest", + "GetAdaptiveMtFileRequest", + "ImportAdaptiveMtFileRequest", + "ImportAdaptiveMtFileResponse", + "ListAdaptiveMtDatasetsRequest", + "ListAdaptiveMtDatasetsResponse", + "ListAdaptiveMtFilesRequest", + "ListAdaptiveMtFilesResponse", + "ListAdaptiveMtSentencesRequest", + 
"ListAdaptiveMtSentencesResponse", + "FileInputSource", + "GcsInputSource", + "GcsOutputDestination", "BatchDocumentInputConfig", "BatchDocumentOutputConfig", "BatchTranslateDocumentMetadata", diff --git a/packages/google-cloud-translate/google/cloud/translate/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate/gapic_version.py index 82156347f3eb..cd6578cfc6b0 100644 --- a/packages/google-cloud-translate/google/cloud/translate/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.13.0" # {x-release-please-version} +__version__ = "3.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v2/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v2/__init__.py index 654d187a1330..686456cd1bf7 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v2/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v2/__init__.py @@ -16,7 +16,6 @@ from google.cloud.translate import __version__ - from google.cloud.translate_v2.client import Client __all__ = ("__version__", "Client") diff --git a/packages/google-cloud-translate/google/cloud/translate_v2/client.py b/packages/google-cloud-translate/google/cloud/translate_v2/client.py index 4d5f37ac9b60..e329334dc222 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v2/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v2/client.py @@ -17,6 +17,7 @@ import google.api_core.client_options from google.cloud.client import Client as BaseClient + from google.cloud.translate_v2._http import Connection ENGLISH_ISO_639 = "en" diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py index b32fbb77db77..ea1d4d36eaa5 
100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/__init__.py @@ -22,6 +22,28 @@ TranslationServiceAsyncClient, TranslationServiceClient, ) +from .types.adaptive_mt import ( + AdaptiveMtDataset, + AdaptiveMtFile, + AdaptiveMtSentence, + AdaptiveMtTranslateRequest, + AdaptiveMtTranslateResponse, + AdaptiveMtTranslation, + CreateAdaptiveMtDatasetRequest, + DeleteAdaptiveMtDatasetRequest, + DeleteAdaptiveMtFileRequest, + GetAdaptiveMtDatasetRequest, + GetAdaptiveMtFileRequest, + ImportAdaptiveMtFileRequest, + ImportAdaptiveMtFileResponse, + ListAdaptiveMtDatasetsRequest, + ListAdaptiveMtDatasetsResponse, + ListAdaptiveMtFilesRequest, + ListAdaptiveMtFilesResponse, + ListAdaptiveMtSentencesRequest, + ListAdaptiveMtSentencesResponse, +) +from .types.common import FileInputSource, GcsInputSource, GcsOutputDestination from .types.translation_service import ( BatchDocumentInputConfig, BatchDocumentOutputConfig, @@ -64,6 +86,12 @@ __all__ = ( "TranslationServiceAsyncClient", + "AdaptiveMtDataset", + "AdaptiveMtFile", + "AdaptiveMtSentence", + "AdaptiveMtTranslateRequest", + "AdaptiveMtTranslateResponse", + "AdaptiveMtTranslation", "BatchDocumentInputConfig", "BatchDocumentOutputConfig", "BatchTranslateDocumentMetadata", @@ -72,8 +100,11 @@ "BatchTranslateMetadata", "BatchTranslateResponse", "BatchTranslateTextRequest", + "CreateAdaptiveMtDatasetRequest", "CreateGlossaryMetadata", "CreateGlossaryRequest", + "DeleteAdaptiveMtDatasetRequest", + "DeleteAdaptiveMtFileRequest", "DeleteGlossaryMetadata", "DeleteGlossaryRequest", "DeleteGlossaryResponse", @@ -83,13 +114,26 @@ "DocumentInputConfig", "DocumentOutputConfig", "DocumentTranslation", + "FileInputSource", "GcsDestination", + "GcsInputSource", + "GcsOutputDestination", "GcsSource", + "GetAdaptiveMtDatasetRequest", + "GetAdaptiveMtFileRequest", "GetGlossaryRequest", "GetSupportedLanguagesRequest", "Glossary", 
"GlossaryInputConfig", + "ImportAdaptiveMtFileRequest", + "ImportAdaptiveMtFileResponse", "InputConfig", + "ListAdaptiveMtDatasetsRequest", + "ListAdaptiveMtDatasetsResponse", + "ListAdaptiveMtFilesRequest", + "ListAdaptiveMtFilesResponse", + "ListAdaptiveMtSentencesRequest", + "ListAdaptiveMtSentencesResponse", "ListGlossariesRequest", "ListGlossariesResponse", "OutputConfig", diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json index b61ab661417e..4f341c6ab80c 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json +++ b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "TranslationServiceClient", "rpcs": { + "AdaptiveMtTranslate": { + "methods": [ + "adaptive_mt_translate" + ] + }, "BatchTranslateDocument": { "methods": [ "batch_translate_document" @@ -20,11 +25,26 @@ "batch_translate_text" ] }, + "CreateAdaptiveMtDataset": { + "methods": [ + "create_adaptive_mt_dataset" + ] + }, "CreateGlossary": { "methods": [ "create_glossary" ] }, + "DeleteAdaptiveMtDataset": { + "methods": [ + "delete_adaptive_mt_dataset" + ] + }, + "DeleteAdaptiveMtFile": { + "methods": [ + "delete_adaptive_mt_file" + ] + }, "DeleteGlossary": { "methods": [ "delete_glossary" @@ -35,6 +55,16 @@ "detect_language" ] }, + "GetAdaptiveMtDataset": { + "methods": [ + "get_adaptive_mt_dataset" + ] + }, + "GetAdaptiveMtFile": { + "methods": [ + "get_adaptive_mt_file" + ] + }, "GetGlossary": { "methods": [ "get_glossary" @@ -45,6 +75,26 @@ "get_supported_languages" ] }, + "ImportAdaptiveMtFile": { + "methods": [ + "import_adaptive_mt_file" + ] + }, + "ListAdaptiveMtDatasets": { + "methods": [ + "list_adaptive_mt_datasets" + ] + }, + "ListAdaptiveMtFiles": { + "methods": [ + "list_adaptive_mt_files" + ] + }, + "ListAdaptiveMtSentences": { + "methods": [ + 
"list_adaptive_mt_sentences" + ] + }, "ListGlossaries": { "methods": [ "list_glossaries" @@ -65,6 +115,11 @@ "grpc-async": { "libraryClient": "TranslationServiceAsyncClient", "rpcs": { + "AdaptiveMtTranslate": { + "methods": [ + "adaptive_mt_translate" + ] + }, "BatchTranslateDocument": { "methods": [ "batch_translate_document" @@ -75,11 +130,26 @@ "batch_translate_text" ] }, + "CreateAdaptiveMtDataset": { + "methods": [ + "create_adaptive_mt_dataset" + ] + }, "CreateGlossary": { "methods": [ "create_glossary" ] }, + "DeleteAdaptiveMtDataset": { + "methods": [ + "delete_adaptive_mt_dataset" + ] + }, + "DeleteAdaptiveMtFile": { + "methods": [ + "delete_adaptive_mt_file" + ] + }, "DeleteGlossary": { "methods": [ "delete_glossary" @@ -90,6 +160,16 @@ "detect_language" ] }, + "GetAdaptiveMtDataset": { + "methods": [ + "get_adaptive_mt_dataset" + ] + }, + "GetAdaptiveMtFile": { + "methods": [ + "get_adaptive_mt_file" + ] + }, "GetGlossary": { "methods": [ "get_glossary" @@ -100,6 +180,26 @@ "get_supported_languages" ] }, + "ImportAdaptiveMtFile": { + "methods": [ + "import_adaptive_mt_file" + ] + }, + "ListAdaptiveMtDatasets": { + "methods": [ + "list_adaptive_mt_datasets" + ] + }, + "ListAdaptiveMtFiles": { + "methods": [ + "list_adaptive_mt_files" + ] + }, + "ListAdaptiveMtSentences": { + "methods": [ + "list_adaptive_mt_sentences" + ] + }, "ListGlossaries": { "methods": [ "list_glossaries" @@ -120,6 +220,11 @@ "rest": { "libraryClient": "TranslationServiceClient", "rpcs": { + "AdaptiveMtTranslate": { + "methods": [ + "adaptive_mt_translate" + ] + }, "BatchTranslateDocument": { "methods": [ "batch_translate_document" @@ -130,11 +235,26 @@ "batch_translate_text" ] }, + "CreateAdaptiveMtDataset": { + "methods": [ + "create_adaptive_mt_dataset" + ] + }, "CreateGlossary": { "methods": [ "create_glossary" ] }, + "DeleteAdaptiveMtDataset": { + "methods": [ + "delete_adaptive_mt_dataset" + ] + }, + "DeleteAdaptiveMtFile": { + "methods": [ + "delete_adaptive_mt_file" + ] + }, 
"DeleteGlossary": { "methods": [ "delete_glossary" @@ -145,6 +265,16 @@ "detect_language" ] }, + "GetAdaptiveMtDataset": { + "methods": [ + "get_adaptive_mt_dataset" + ] + }, + "GetAdaptiveMtFile": { + "methods": [ + "get_adaptive_mt_file" + ] + }, "GetGlossary": { "methods": [ "get_glossary" @@ -155,6 +285,26 @@ "get_supported_languages" ] }, + "ImportAdaptiveMtFile": { + "methods": [ + "import_adaptive_mt_file" + ] + }, + "ListAdaptiveMtDatasets": { + "methods": [ + "list_adaptive_mt_datasets" + ] + }, + "ListAdaptiveMtFiles": { + "methods": [ + "list_adaptive_mt_files" + ] + }, + "ListAdaptiveMtSentences": { + "methods": [ + "list_adaptive_mt_sentences" + ] + }, "ListGlossaries": { "methods": [ "list_glossaries" diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py index 82156347f3eb..cd6578cfc6b0 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.13.0" # {x-release-please-version} +__version__ = "3.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py index be7401517ecb..b17d48a58e20 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/async_client.py @@ -33,9 +33,10 @@ from google.api_core import retry_async as retries from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.translate_v3 import gapic_version as package_version from google.oauth2 import service_account # type: ignore +from google.cloud.translate_v3 import gapic_version as package_version + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -43,10 +44,11 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.translate_v3.services.translation_service import pagers -from google.cloud.translate_v3.types import translation_service from google.protobuf import timestamp_pb2 # type: ignore +from google.cloud.translate_v3.services.translation_service import pagers +from google.cloud.translate_v3.types import adaptive_mt, translation_service + from .client import TranslationServiceClient from .transports.base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .transports.grpc_asyncio import TranslationServiceGrpcAsyncIOTransport @@ -60,6 +62,22 @@ class TranslationServiceAsyncClient: DEFAULT_ENDPOINT = TranslationServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = TranslationServiceClient.DEFAULT_MTLS_ENDPOINT + 
adaptive_mt_dataset_path = staticmethod( + TranslationServiceClient.adaptive_mt_dataset_path + ) + parse_adaptive_mt_dataset_path = staticmethod( + TranslationServiceClient.parse_adaptive_mt_dataset_path + ) + adaptive_mt_file_path = staticmethod(TranslationServiceClient.adaptive_mt_file_path) + parse_adaptive_mt_file_path = staticmethod( + TranslationServiceClient.parse_adaptive_mt_file_path + ) + adaptive_mt_sentence_path = staticmethod( + TranslationServiceClient.adaptive_mt_sentence_path + ) + parse_adaptive_mt_sentence_path = staticmethod( + TranslationServiceClient.parse_adaptive_mt_sentence_path + ) glossary_path = staticmethod(TranslationServiceClient.glossary_path) parse_glossary_path = staticmethod(TranslationServiceClient.parse_glossary_path) common_billing_account_path = staticmethod( @@ -1648,6 +1666,1091 @@ async def sample_delete_glossary(): # Done; return the response. return response + async def create_adaptive_mt_dataset( + self, + request: Optional[ + Union[adaptive_mt.CreateAdaptiveMtDatasetRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + adaptive_mt_dataset: Optional[adaptive_mt.AdaptiveMtDataset] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Creates an Adaptive MT dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_create_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() + adaptive_mt_dataset.name = "name_value" + + request = translate_v3.CreateAdaptiveMtDatasetRequest( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt_dataset, + ) + + # Make the request + response = await client.create_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest, dict]]): + The request object. Request message for creating an + AdaptiveMtDataset. + parent (:class:`str`): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + adaptive_mt_dataset (:class:`google.cloud.translate_v3.types.AdaptiveMtDataset`): + Required. The AdaptiveMtDataset to be + created. + + This corresponds to the ``adaptive_mt_dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, adaptive_mt_dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.CreateAdaptiveMtDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if adaptive_mt_dataset is not None: + request.adaptive_mt_dataset = adaptive_mt_dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_adaptive_mt_dataset, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_adaptive_mt_dataset( + self, + request: Optional[ + Union[adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Adaptive MT dataset, including all its + entries and associated metadata. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_delete_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + await client.delete_adaptive_mt_dataset(request=request) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest, dict]]): + The request object. Request message for deleting an + AdaptiveMtDataset. + name (:class:`str`): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_adaptive_mt_dataset, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_adaptive_mt_dataset( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Gets the Adaptive MT dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_get_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest, dict]]): + The request object. Request message for getting an + Adaptive MT dataset. + name (:class:`str`): + Required. Name of the dataset. 
In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.GetAdaptiveMtDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_adaptive_mt_dataset, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_adaptive_mt_datasets( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtDatasetsAsyncPager: + r"""Lists all Adaptive MT datasets for which the caller + has read permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_adaptive_mt_datasets(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest, dict]]): + The request object. Request message for listing all + Adaptive MT datasets that the requestor + has access to. + parent (:class:`str`): + Required. The resource name of the project from which to + list the Adaptive MT datasets. + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsAsyncPager: + A list of AdaptiveMtDatasets. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_adaptive_mt_datasets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAdaptiveMtDatasetsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def adaptive_mt_translate( + self, + request: Optional[Union[adaptive_mt.AdaptiveMtTranslateRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtTranslateResponse: + r"""Translate text using Adaptive MT. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_adaptive_mt_translate(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + content=['content_value1', 'content_value2'], + ) + + # Make the request + response = await client.adaptive_mt_translate(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest, dict]]): + The request object. The request for sending an AdaptiveMt + translation query. + parent (:class:`str`): + Required. Location to make a regional call. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`MutableSequence[str]`): + Required. The content of the input in + string format. 
For now only one sentence + per request is supported. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtTranslateResponse: + An AdaptiveMtTranslate response. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.AdaptiveMtTranslateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content: + request.content.extend(content) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.adaptive_mt_translate, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtFile: + r"""Gets and AdaptiveMtFile + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_get_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.GetAdaptiveMtFileRequest, dict]]): + The request object. The request for getting an + AdaptiveMtFile. + name (:class:`str`): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.translate_v3.types.AdaptiveMtFile: + An AdaptiveMtFile. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.GetAdaptiveMtFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_adaptive_mt_file, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.DeleteAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an AdaptiveMtFile along with its sentences. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_delete_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + await client.delete_adaptive_mt_file(request=request) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest, dict]]): + The request object. The request for deleting an + AdaptiveMt file. + name (:class:`str`): + Required. The resource name of the file to delete, in + form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.DeleteAdaptiveMtFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_adaptive_mt_file, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def import_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.ImportAdaptiveMtFileRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + r"""Imports an AdaptiveMtFile and adds all of its + sentences into the AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_import_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + file_input_source = translate_v3.FileInputSource() + file_input_source.mime_type = "mime_type_value" + file_input_source.content = b'content_blob' + file_input_source.display_name = "display_name_value" + + request = translate_v3.ImportAdaptiveMtFileRequest( + file_input_source=file_input_source, + parent="parent_value", + ) + + # Make the request + response = await client.import_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest, dict]]): + The request object. The request for importing an + AdaptiveMt file along with its + sentences. + parent (:class:`str`): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse: + The response for importing an + AdaptiveMtFile + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.ImportAdaptiveMtFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_adaptive_mt_file, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_adaptive_mt_files( + self, + request: Optional[Union[adaptive_mt.ListAdaptiveMtFilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtFilesAsyncPager: + r"""Lists all AdaptiveMtFiles associated to an + AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_adaptive_mt_files(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtFilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_files(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest, dict]]): + The request object. The request to list all AdaptiveMt + files under a given dataset. + parent (:class:`str`): + Required. The resource name of the project from which to + list the Adaptive MT files. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesAsyncPager: + The response for listing all + AdaptiveMt files under a given dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_adaptive_mt_files, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAdaptiveMtFilesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_adaptive_mt_sentences( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtSentencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtSentencesAsyncPager: + r"""Lists all AdaptiveMtSentences under a given + file/dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + async def sample_list_adaptive_mt_sentences(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtSentencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_sentences(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest, dict]]): + The request object. The request for listing Adaptive MT + sentences from a Dataset/File. + parent (:class:`str`): + Required. The resource name of the project from which to + list the Adaptive MT files. The following format lists + all sentences under a file. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + The following format lists all sentences within a + dataset. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesAsyncPager: + List AdaptiveMt sentences response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_adaptive_mt_sentences, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAdaptiveMtSentencesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "TranslationServiceAsyncClient": return self diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py index 1d4f4310f983..9f6576a9f6b9 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/client.py @@ -37,9 +37,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.translate_v3 import gapic_version as package_version from google.oauth2 import service_account # type: ignore +from google.cloud.translate_v3 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -47,10 +48,11 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.translate_v3.services.translation_service import pagers -from google.cloud.translate_v3.types import translation_service from google.protobuf import timestamp_pb2 # type: ignore +from google.cloud.translate_v3.services.translation_service import pagers +from google.cloud.translate_v3.types import adaptive_mt, translation_service + from .transports.base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .transports.grpc import TranslationServiceGrpcTransport from .transports.grpc_asyncio import TranslationServiceGrpcAsyncIOTransport @@ -179,6 +181,78 @@ def transport(self) -> TranslationServiceTransport: """ return self._transport + @staticmethod + def adaptive_mt_dataset_path( + project: str, + location: str, + dataset: str, + ) -> str: + 
"""Returns a fully-qualified adaptive_mt_dataset string.""" + return "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}".format( + project=project, + location=location, + dataset=dataset, + ) + + @staticmethod + def parse_adaptive_mt_dataset_path(path: str) -> Dict[str, str]: + """Parses a adaptive_mt_dataset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/adaptiveMtDatasets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def adaptive_mt_file_path( + project: str, + location: str, + dataset: str, + file: str, + ) -> str: + """Returns a fully-qualified adaptive_mt_file string.""" + return "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}".format( + project=project, + location=location, + dataset=dataset, + file=file, + ) + + @staticmethod + def parse_adaptive_mt_file_path(path: str) -> Dict[str, str]: + """Parses a adaptive_mt_file path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/adaptiveMtDatasets/(?P.+?)/adaptiveMtFiles/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def adaptive_mt_sentence_path( + project: str, + location: str, + dataset: str, + file: str, + sentence: str, + ) -> str: + """Returns a fully-qualified adaptive_mt_sentence string.""" + return "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}/adaptiveMtSentences/{sentence}".format( + project=project, + location=location, + dataset=dataset, + file=file, + sentence=sentence, + ) + + @staticmethod + def parse_adaptive_mt_sentence_path(path: str) -> Dict[str, str]: + """Parses a adaptive_mt_sentence path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/adaptiveMtDatasets/(?P.+?)/adaptiveMtFiles/(?P.+?)/adaptiveMtSentences/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def glossary_path( 
project: str, @@ -1838,6 +1912,1099 @@ def sample_delete_glossary(): # Done; return the response. return response + def create_adaptive_mt_dataset( + self, + request: Optional[ + Union[adaptive_mt.CreateAdaptiveMtDatasetRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + adaptive_mt_dataset: Optional[adaptive_mt.AdaptiveMtDataset] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Creates an Adaptive MT dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_create_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() + adaptive_mt_dataset.name = "name_value" + + request = translate_v3.CreateAdaptiveMtDatasetRequest( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt_dataset, + ) + + # Make the request + response = client.create_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest, dict]): + The request object. Request message for creating an + AdaptiveMtDataset. + parent (str): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ adaptive_mt_dataset (google.cloud.translate_v3.types.AdaptiveMtDataset): + Required. The AdaptiveMtDataset to be + created. + + This corresponds to the ``adaptive_mt_dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, adaptive_mt_dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.CreateAdaptiveMtDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.CreateAdaptiveMtDatasetRequest): + request = adaptive_mt.CreateAdaptiveMtDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if adaptive_mt_dataset is not None: + request.adaptive_mt_dataset = adaptive_mt_dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_adaptive_mt_dataset + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_adaptive_mt_dataset( + self, + request: Optional[ + Union[adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an Adaptive MT dataset, including all its + entries and associated metadata. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_delete_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + client.delete_adaptive_mt_dataset(request=request) + + Args: + request (Union[google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest, dict]): + The request object. Request message for deleting an + AdaptiveMtDataset. + name (str): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.DeleteAdaptiveMtDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.DeleteAdaptiveMtDatasetRequest): + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_adaptive_mt_dataset + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_adaptive_mt_dataset( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtDatasetRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Gets the Adaptive MT dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_get_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest, dict]): + The request object. Request message for getting an + Adaptive MT dataset. + name (str): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtDataset: + An Adaptive MT Dataset. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.GetAdaptiveMtDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.GetAdaptiveMtDatasetRequest): + request = adaptive_mt.GetAdaptiveMtDatasetRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_adaptive_mt_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_adaptive_mt_datasets( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtDatasetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtDatasetsPager: + r"""Lists all Adaptive MT datasets for which the caller + has read permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_adaptive_mt_datasets(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest, dict]): + The request object. Request message for listing all + Adaptive MT datasets that the requestor + has access to. + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT datasets. + ``projects/{project-number-or-id}/locations/{location-id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsPager: + A list of AdaptiveMtDatasets. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.ListAdaptiveMtDatasetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.ListAdaptiveMtDatasetsRequest): + request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_adaptive_mt_datasets + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListAdaptiveMtDatasetsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def adaptive_mt_translate( + self, + request: Optional[Union[adaptive_mt.AdaptiveMtTranslateRequest, dict]] = None, + *, + parent: Optional[str] = None, + content: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtTranslateResponse: + r"""Translate text using Adaptive MT. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_adaptive_mt_translate(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + content=['content_value1', 'content_value2'], + ) + + # Make the request + response = client.adaptive_mt_translate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.AdaptiveMtTranslateRequest, dict]): + The request object. The request for sending an AdaptiveMt + translation query. + parent (str): + Required. Location to make a regional call. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ content (MutableSequence[str]): + Required. The content of the input in + string format. For now only one sentence + per request is supported. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtTranslateResponse: + An AdaptiveMtTranslate response. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.AdaptiveMtTranslateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.AdaptiveMtTranslateRequest): + request = adaptive_mt.AdaptiveMtTranslateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.adaptive_mt_translate] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.GetAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtFile: + r"""Gets and AdaptiveMtFile + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_get_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + response = client.get_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.GetAdaptiveMtFileRequest, dict]): + The request object. The request for getting an + AdaptiveMtFile. + name (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.AdaptiveMtFile: + An AdaptiveMtFile. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.GetAdaptiveMtFileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.GetAdaptiveMtFileRequest): + request = adaptive_mt.GetAdaptiveMtFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_adaptive_mt_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.DeleteAdaptiveMtFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an AdaptiveMtFile along with its sentences. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_delete_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + client.delete_adaptive_mt_file(request=request) + + Args: + request (Union[google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest, dict]): + The request object. The request for deleting an + AdaptiveMt file. + name (str): + Required. The resource name of the file to delete, in + form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.DeleteAdaptiveMtFileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.DeleteAdaptiveMtFileRequest): + request = adaptive_mt.DeleteAdaptiveMtFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_adaptive_mt_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def import_adaptive_mt_file( + self, + request: Optional[Union[adaptive_mt.ImportAdaptiveMtFileRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + r"""Imports an AdaptiveMtFile and adds all of its + sentences into the AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_import_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + file_input_source = translate_v3.FileInputSource() + file_input_source.mime_type = "mime_type_value" + file_input_source.content = b'content_blob' + file_input_source.display_name = "display_name_value" + + request = translate_v3.ImportAdaptiveMtFileRequest( + file_input_source=file_input_source, + parent="parent_value", + ) + + # Make the request + response = client.import_adaptive_mt_file(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest, dict]): + The request object. The request for importing an + AdaptiveMt file along with its + sentences. + parent (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse: + The response for importing an + AdaptiveMtFile + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.ImportAdaptiveMtFileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.ImportAdaptiveMtFileRequest): + request = adaptive_mt.ImportAdaptiveMtFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_adaptive_mt_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_adaptive_mt_files( + self, + request: Optional[Union[adaptive_mt.ListAdaptiveMtFilesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtFilesPager: + r"""Lists all AdaptiveMtFiles associated to an + AdaptiveMtDataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_adaptive_mt_files(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtFilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_files(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest, dict]): + The request object. The request to list all AdaptiveMt + files under a given dataset. + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT files. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesPager: + The response for listing all + AdaptiveMt files under a given dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.ListAdaptiveMtFilesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.ListAdaptiveMtFilesRequest): + request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_adaptive_mt_files] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdaptiveMtFilesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_adaptive_mt_sentences( + self, + request: Optional[ + Union[adaptive_mt.ListAdaptiveMtSentencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdaptiveMtSentencesPager: + r"""Lists all AdaptiveMtSentences under a given + file/dataset. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3 + + def sample_list_adaptive_mt_sentences(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtSentencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_sentences(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest, dict]): + The request object. The request for listing Adaptive MT + sentences from a Dataset/File. + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT files. The following format lists + all sentences under a file. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + The following format lists all sentences within a + dataset. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesPager: + List AdaptiveMt sentences response. 
+ + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a adaptive_mt.ListAdaptiveMtSentencesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, adaptive_mt.ListAdaptiveMtSentencesRequest): + request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_adaptive_mt_sentences + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdaptiveMtSentencesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "TranslationServiceClient": return self diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py index 90ba674c40a1..97150302b979 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/pagers.py @@ -24,7 +24,7 @@ Tuple, ) -from google.cloud.translate_v3.types import translation_service +from google.cloud.translate_v3.types import adaptive_mt, translation_service class ListGlossariesPager: @@ -153,3 +153,387 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdaptiveMtDatasetsPager: + """A pager for iterating through ``list_adaptive_mt_datasets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListAdaptiveMtDatasetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``adaptive_mt_datasets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdaptiveMtDatasets`` requests and continue to iterate + through the ``adaptive_mt_datasets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListAdaptiveMtDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., adaptive_mt.ListAdaptiveMtDatasetsResponse], + request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + response: adaptive_mt.ListAdaptiveMtDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListAdaptiveMtDatasetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[adaptive_mt.ListAdaptiveMtDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[adaptive_mt.AdaptiveMtDataset]: + for page in self.pages: + yield from page.adaptive_mt_datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdaptiveMtDatasetsAsyncPager: + """A pager for iterating through ``list_adaptive_mt_datasets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListAdaptiveMtDatasetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``adaptive_mt_datasets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAdaptiveMtDatasets`` requests and continue to iterate + through the ``adaptive_mt_datasets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListAdaptiveMtDatasetsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[adaptive_mt.ListAdaptiveMtDatasetsResponse]], + request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + response: adaptive_mt.ListAdaptiveMtDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListAdaptiveMtDatasetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = adaptive_mt.ListAdaptiveMtDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[adaptive_mt.ListAdaptiveMtDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[adaptive_mt.AdaptiveMtDataset]: + async def async_generator(): + async for page in self.pages: + for response in page.adaptive_mt_datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdaptiveMtFilesPager: + """A pager for iterating through ``list_adaptive_mt_files`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListAdaptiveMtFilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``adaptive_mt_files`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdaptiveMtFiles`` requests and continue to iterate + through the ``adaptive_mt_files`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListAdaptiveMtFilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., adaptive_mt.ListAdaptiveMtFilesResponse], + request: adaptive_mt.ListAdaptiveMtFilesRequest, + response: adaptive_mt.ListAdaptiveMtFilesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListAdaptiveMtFilesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[adaptive_mt.ListAdaptiveMtFilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[adaptive_mt.AdaptiveMtFile]: + for page in self.pages: + yield from page.adaptive_mt_files + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdaptiveMtFilesAsyncPager: + """A pager for iterating through ``list_adaptive_mt_files`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListAdaptiveMtFilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``adaptive_mt_files`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAdaptiveMtFiles`` requests and continue to iterate + through the ``adaptive_mt_files`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListAdaptiveMtFilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[adaptive_mt.ListAdaptiveMtFilesResponse]], + request: adaptive_mt.ListAdaptiveMtFilesRequest, + response: adaptive_mt.ListAdaptiveMtFilesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListAdaptiveMtFilesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = adaptive_mt.ListAdaptiveMtFilesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[adaptive_mt.ListAdaptiveMtFilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[adaptive_mt.AdaptiveMtFile]: + async def async_generator(): + async for page in self.pages: + for response in page.adaptive_mt_files: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdaptiveMtSentencesPager: + """A pager for iterating through ``list_adaptive_mt_sentences`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListAdaptiveMtSentencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``adaptive_mt_sentences`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdaptiveMtSentences`` requests and continue to iterate + through the ``adaptive_mt_sentences`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListAdaptiveMtSentencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., adaptive_mt.ListAdaptiveMtSentencesResponse], + request: adaptive_mt.ListAdaptiveMtSentencesRequest, + response: adaptive_mt.ListAdaptiveMtSentencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListAdaptiveMtSentencesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[adaptive_mt.ListAdaptiveMtSentencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[adaptive_mt.AdaptiveMtSentence]: + for page in self.pages: + yield from page.adaptive_mt_sentences + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdaptiveMtSentencesAsyncPager: + """A pager for iterating through ``list_adaptive_mt_sentences`` requests. + + This class thinly wraps an initial + :class:`google.cloud.translate_v3.types.ListAdaptiveMtSentencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``adaptive_mt_sentences`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListAdaptiveMtSentences`` requests and continue to iterate + through the ``adaptive_mt_sentences`` field on the + corresponding responses. + + All the usual :class:`google.cloud.translate_v3.types.ListAdaptiveMtSentencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[adaptive_mt.ListAdaptiveMtSentencesResponse]], + request: adaptive_mt.ListAdaptiveMtSentencesRequest, + response: adaptive_mt.ListAdaptiveMtSentencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest): + The initial request object. + response (google.cloud.translate_v3.types.ListAdaptiveMtSentencesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = adaptive_mt.ListAdaptiveMtSentencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[adaptive_mt.ListAdaptiveMtSentencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[adaptive_mt.AdaptiveMtSentence]: + async def async_generator(): + async for page in self.pages: + for response in page.adaptive_mt_sentences: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py index 1b5d4c338e9b..c4937b556f24 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/base.py @@ -22,10 +22,12 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.translate_v3 import gapic_version as package_version -from google.cloud.translate_v3.types import translation_service from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.translate_v3 import gapic_version as package_version +from google.cloud.translate_v3.types import adaptive_mt, 
translation_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -215,6 +217,56 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.create_adaptive_mt_dataset: gapic_v1.method.wrap_method( + self.create_adaptive_mt_dataset, + default_timeout=None, + client_info=client_info, + ), + self.delete_adaptive_mt_dataset: gapic_v1.method.wrap_method( + self.delete_adaptive_mt_dataset, + default_timeout=None, + client_info=client_info, + ), + self.get_adaptive_mt_dataset: gapic_v1.method.wrap_method( + self.get_adaptive_mt_dataset, + default_timeout=None, + client_info=client_info, + ), + self.list_adaptive_mt_datasets: gapic_v1.method.wrap_method( + self.list_adaptive_mt_datasets, + default_timeout=None, + client_info=client_info, + ), + self.adaptive_mt_translate: gapic_v1.method.wrap_method( + self.adaptive_mt_translate, + default_timeout=None, + client_info=client_info, + ), + self.get_adaptive_mt_file: gapic_v1.method.wrap_method( + self.get_adaptive_mt_file, + default_timeout=None, + client_info=client_info, + ), + self.delete_adaptive_mt_file: gapic_v1.method.wrap_method( + self.delete_adaptive_mt_file, + default_timeout=None, + client_info=client_info, + ), + self.import_adaptive_mt_file: gapic_v1.method.wrap_method( + self.import_adaptive_mt_file, + default_timeout=None, + client_info=client_info, + ), + self.list_adaptive_mt_files: gapic_v1.method.wrap_method( + self.list_adaptive_mt_files, + default_timeout=None, + client_info=client_info, + ), + self.list_adaptive_mt_sentences: gapic_v1.method.wrap_method( + self.list_adaptive_mt_sentences, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -336,6 +388,111 @@ def delete_glossary( ]: raise NotImplementedError() + @property + def create_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.CreateAdaptiveMtDatasetRequest], + Union[adaptive_mt.AdaptiveMtDataset, 
Awaitable[adaptive_mt.AdaptiveMtDataset]], + ]: + raise NotImplementedError() + + @property + def delete_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.DeleteAdaptiveMtDatasetRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtDatasetRequest], + Union[adaptive_mt.AdaptiveMtDataset, Awaitable[adaptive_mt.AdaptiveMtDataset]], + ]: + raise NotImplementedError() + + @property + def list_adaptive_mt_datasets( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtDatasetsRequest], + Union[ + adaptive_mt.ListAdaptiveMtDatasetsResponse, + Awaitable[adaptive_mt.ListAdaptiveMtDatasetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def adaptive_mt_translate( + self, + ) -> Callable[ + [adaptive_mt.AdaptiveMtTranslateRequest], + Union[ + adaptive_mt.AdaptiveMtTranslateResponse, + Awaitable[adaptive_mt.AdaptiveMtTranslateResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtFileRequest], + Union[adaptive_mt.AdaptiveMtFile, Awaitable[adaptive_mt.AdaptiveMtFile]], + ]: + raise NotImplementedError() + + @property + def delete_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.DeleteAdaptiveMtFileRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def import_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.ImportAdaptiveMtFileRequest], + Union[ + adaptive_mt.ImportAdaptiveMtFileResponse, + Awaitable[adaptive_mt.ImportAdaptiveMtFileResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_adaptive_mt_files( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtFilesRequest], + Union[ + adaptive_mt.ListAdaptiveMtFilesResponse, + Awaitable[adaptive_mt.ListAdaptiveMtFilesResponse], + ], + ]: + raise NotImplementedError() + + 
@property + def list_adaptive_mt_sentences( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtSentencesRequest], + Union[ + adaptive_mt.ListAdaptiveMtSentencesResponse, + Awaitable[adaptive_mt.ListAdaptiveMtSentencesResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py index aca6525948cf..d2d25800328f 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc.py @@ -16,14 +16,15 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -import grpc # type: ignore - from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.translate_v3.types import translation_service from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.translate_v3.types import adaptive_mt, translation_service from .base import DEFAULT_CLIENT_INFO, TranslationServiceTransport @@ -553,6 +554,290 @@ def delete_glossary( ) return self._stubs["delete_glossary"] + @property + def create_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.CreateAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset + ]: + r"""Return a callable for the create adaptive mt dataset method over gRPC. + + Creates an Adaptive MT dataset. 
+ + Returns: + Callable[[~.CreateAdaptiveMtDatasetRequest], + ~.AdaptiveMtDataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_adaptive_mt_dataset" not in self._stubs: + self._stubs["create_adaptive_mt_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateAdaptiveMtDataset", + request_serializer=adaptive_mt.CreateAdaptiveMtDatasetRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtDataset.deserialize, + ) + return self._stubs["create_adaptive_mt_dataset"] + + @property + def delete_adaptive_mt_dataset( + self, + ) -> Callable[[adaptive_mt.DeleteAdaptiveMtDatasetRequest], empty_pb2.Empty]: + r"""Return a callable for the delete adaptive mt dataset method over gRPC. + + Deletes an Adaptive MT dataset, including all its + entries and associated metadata. + + Returns: + Callable[[~.DeleteAdaptiveMtDatasetRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_adaptive_mt_dataset" not in self._stubs: + self._stubs["delete_adaptive_mt_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteAdaptiveMtDataset", + request_serializer=adaptive_mt.DeleteAdaptiveMtDatasetRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_adaptive_mt_dataset"] + + @property + def get_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset + ]: + r"""Return a callable for the get adaptive mt dataset method over gRPC. + + Gets the Adaptive MT dataset. + + Returns: + Callable[[~.GetAdaptiveMtDatasetRequest], + ~.AdaptiveMtDataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_adaptive_mt_dataset" not in self._stubs: + self._stubs["get_adaptive_mt_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetAdaptiveMtDataset", + request_serializer=adaptive_mt.GetAdaptiveMtDatasetRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtDataset.deserialize, + ) + return self._stubs["get_adaptive_mt_dataset"] + + @property + def list_adaptive_mt_datasets( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtDatasetsRequest], + adaptive_mt.ListAdaptiveMtDatasetsResponse, + ]: + r"""Return a callable for the list adaptive mt datasets method over gRPC. + + Lists all Adaptive MT datasets for which the caller + has read permission. + + Returns: + Callable[[~.ListAdaptiveMtDatasetsRequest], + ~.ListAdaptiveMtDatasetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_adaptive_mt_datasets" not in self._stubs: + self._stubs["list_adaptive_mt_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListAdaptiveMtDatasets", + request_serializer=adaptive_mt.ListAdaptiveMtDatasetsRequest.serialize, + response_deserializer=adaptive_mt.ListAdaptiveMtDatasetsResponse.deserialize, + ) + return self._stubs["list_adaptive_mt_datasets"] + + @property + def adaptive_mt_translate( + self, + ) -> Callable[ + [adaptive_mt.AdaptiveMtTranslateRequest], + adaptive_mt.AdaptiveMtTranslateResponse, + ]: + r"""Return a callable for the adaptive mt translate method over gRPC. + + Translate text using Adaptive MT. + + Returns: + Callable[[~.AdaptiveMtTranslateRequest], + ~.AdaptiveMtTranslateResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "adaptive_mt_translate" not in self._stubs: + self._stubs["adaptive_mt_translate"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/AdaptiveMtTranslate", + request_serializer=adaptive_mt.AdaptiveMtTranslateRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtTranslateResponse.deserialize, + ) + return self._stubs["adaptive_mt_translate"] + + @property + def get_adaptive_mt_file( + self, + ) -> Callable[[adaptive_mt.GetAdaptiveMtFileRequest], adaptive_mt.AdaptiveMtFile]: + r"""Return a callable for the get adaptive mt file method over gRPC. + + Gets and AdaptiveMtFile + + Returns: + Callable[[~.GetAdaptiveMtFileRequest], + ~.AdaptiveMtFile]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_adaptive_mt_file" not in self._stubs: + self._stubs["get_adaptive_mt_file"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetAdaptiveMtFile", + request_serializer=adaptive_mt.GetAdaptiveMtFileRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtFile.deserialize, + ) + return self._stubs["get_adaptive_mt_file"] + + @property + def delete_adaptive_mt_file( + self, + ) -> Callable[[adaptive_mt.DeleteAdaptiveMtFileRequest], empty_pb2.Empty]: + r"""Return a callable for the delete adaptive mt file method over gRPC. + + Deletes an AdaptiveMtFile along with its sentences. + + Returns: + Callable[[~.DeleteAdaptiveMtFileRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_adaptive_mt_file" not in self._stubs: + self._stubs["delete_adaptive_mt_file"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteAdaptiveMtFile", + request_serializer=adaptive_mt.DeleteAdaptiveMtFileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_adaptive_mt_file"] + + @property + def import_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.ImportAdaptiveMtFileRequest], + adaptive_mt.ImportAdaptiveMtFileResponse, + ]: + r"""Return a callable for the import adaptive mt file method over gRPC. + + Imports an AdaptiveMtFile and adds all of its + sentences into the AdaptiveMtDataset. 
+ + Returns: + Callable[[~.ImportAdaptiveMtFileRequest], + ~.ImportAdaptiveMtFileResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_adaptive_mt_file" not in self._stubs: + self._stubs["import_adaptive_mt_file"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ImportAdaptiveMtFile", + request_serializer=adaptive_mt.ImportAdaptiveMtFileRequest.serialize, + response_deserializer=adaptive_mt.ImportAdaptiveMtFileResponse.deserialize, + ) + return self._stubs["import_adaptive_mt_file"] + + @property + def list_adaptive_mt_files( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtFilesRequest], + adaptive_mt.ListAdaptiveMtFilesResponse, + ]: + r"""Return a callable for the list adaptive mt files method over gRPC. + + Lists all AdaptiveMtFiles associated to an + AdaptiveMtDataset. + + Returns: + Callable[[~.ListAdaptiveMtFilesRequest], + ~.ListAdaptiveMtFilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_adaptive_mt_files" not in self._stubs: + self._stubs["list_adaptive_mt_files"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListAdaptiveMtFiles", + request_serializer=adaptive_mt.ListAdaptiveMtFilesRequest.serialize, + response_deserializer=adaptive_mt.ListAdaptiveMtFilesResponse.deserialize, + ) + return self._stubs["list_adaptive_mt_files"] + + @property + def list_adaptive_mt_sentences( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtSentencesRequest], + adaptive_mt.ListAdaptiveMtSentencesResponse, + ]: + r"""Return a callable for the list adaptive mt sentences method over gRPC. + + Lists all AdaptiveMtSentences under a given + file/dataset. + + Returns: + Callable[[~.ListAdaptiveMtSentencesRequest], + ~.ListAdaptiveMtSentencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_adaptive_mt_sentences" not in self._stubs: + self._stubs["list_adaptive_mt_sentences"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListAdaptiveMtSentences", + request_serializer=adaptive_mt.ListAdaptiveMtSentencesRequest.serialize, + response_deserializer=adaptive_mt.ListAdaptiveMtSentencesResponse.deserialize, + ) + return self._stubs["list_adaptive_mt_sentences"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py index 8cdc572e8d96..1daabe2b44b7 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/grpc_asyncio.py @@ -16,14 +16,15 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.translate_v3.types import translation_service from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.translate_v3.types import adaptive_mt, translation_service from .base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .grpc import TranslationServiceGrpcTransport @@ -561,6 +562,298 @@ def delete_glossary( ) return self._stubs["delete_glossary"] + @property + def create_adaptive_mt_dataset( + self, + ) -> 
Callable[ + [adaptive_mt.CreateAdaptiveMtDatasetRequest], + Awaitable[adaptive_mt.AdaptiveMtDataset], + ]: + r"""Return a callable for the create adaptive mt dataset method over gRPC. + + Creates an Adaptive MT dataset. + + Returns: + Callable[[~.CreateAdaptiveMtDatasetRequest], + Awaitable[~.AdaptiveMtDataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_adaptive_mt_dataset" not in self._stubs: + self._stubs["create_adaptive_mt_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateAdaptiveMtDataset", + request_serializer=adaptive_mt.CreateAdaptiveMtDatasetRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtDataset.deserialize, + ) + return self._stubs["create_adaptive_mt_dataset"] + + @property + def delete_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.DeleteAdaptiveMtDatasetRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete adaptive mt dataset method over gRPC. + + Deletes an Adaptive MT dataset, including all its + entries and associated metadata. + + Returns: + Callable[[~.DeleteAdaptiveMtDatasetRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_adaptive_mt_dataset" not in self._stubs: + self._stubs["delete_adaptive_mt_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteAdaptiveMtDataset", + request_serializer=adaptive_mt.DeleteAdaptiveMtDatasetRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_adaptive_mt_dataset"] + + @property + def get_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtDatasetRequest], + Awaitable[adaptive_mt.AdaptiveMtDataset], + ]: + r"""Return a callable for the get adaptive mt dataset method over gRPC. + + Gets the Adaptive MT dataset. + + Returns: + Callable[[~.GetAdaptiveMtDatasetRequest], + Awaitable[~.AdaptiveMtDataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_adaptive_mt_dataset" not in self._stubs: + self._stubs["get_adaptive_mt_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetAdaptiveMtDataset", + request_serializer=adaptive_mt.GetAdaptiveMtDatasetRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtDataset.deserialize, + ) + return self._stubs["get_adaptive_mt_dataset"] + + @property + def list_adaptive_mt_datasets( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtDatasetsRequest], + Awaitable[adaptive_mt.ListAdaptiveMtDatasetsResponse], + ]: + r"""Return a callable for the list adaptive mt datasets method over gRPC. + + Lists all Adaptive MT datasets for which the caller + has read permission. + + Returns: + Callable[[~.ListAdaptiveMtDatasetsRequest], + Awaitable[~.ListAdaptiveMtDatasetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_adaptive_mt_datasets" not in self._stubs: + self._stubs["list_adaptive_mt_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListAdaptiveMtDatasets", + request_serializer=adaptive_mt.ListAdaptiveMtDatasetsRequest.serialize, + response_deserializer=adaptive_mt.ListAdaptiveMtDatasetsResponse.deserialize, + ) + return self._stubs["list_adaptive_mt_datasets"] + + @property + def adaptive_mt_translate( + self, + ) -> Callable[ + [adaptive_mt.AdaptiveMtTranslateRequest], + Awaitable[adaptive_mt.AdaptiveMtTranslateResponse], + ]: + r"""Return a callable for the adaptive mt translate method over gRPC. + + Translate text using Adaptive MT. + + Returns: + Callable[[~.AdaptiveMtTranslateRequest], + Awaitable[~.AdaptiveMtTranslateResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "adaptive_mt_translate" not in self._stubs: + self._stubs["adaptive_mt_translate"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/AdaptiveMtTranslate", + request_serializer=adaptive_mt.AdaptiveMtTranslateRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtTranslateResponse.deserialize, + ) + return self._stubs["adaptive_mt_translate"] + + @property + def get_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtFileRequest], Awaitable[adaptive_mt.AdaptiveMtFile] + ]: + r"""Return a callable for the get adaptive mt file method over gRPC. 
+ + Gets and AdaptiveMtFile + + Returns: + Callable[[~.GetAdaptiveMtFileRequest], + Awaitable[~.AdaptiveMtFile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_adaptive_mt_file" not in self._stubs: + self._stubs["get_adaptive_mt_file"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetAdaptiveMtFile", + request_serializer=adaptive_mt.GetAdaptiveMtFileRequest.serialize, + response_deserializer=adaptive_mt.AdaptiveMtFile.deserialize, + ) + return self._stubs["get_adaptive_mt_file"] + + @property + def delete_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.DeleteAdaptiveMtFileRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete adaptive mt file method over gRPC. + + Deletes an AdaptiveMtFile along with its sentences. + + Returns: + Callable[[~.DeleteAdaptiveMtFileRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_adaptive_mt_file" not in self._stubs: + self._stubs["delete_adaptive_mt_file"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteAdaptiveMtFile", + request_serializer=adaptive_mt.DeleteAdaptiveMtFileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_adaptive_mt_file"] + + @property + def import_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.ImportAdaptiveMtFileRequest], + Awaitable[adaptive_mt.ImportAdaptiveMtFileResponse], + ]: + r"""Return a callable for the import adaptive mt file method over gRPC. + + Imports an AdaptiveMtFile and adds all of its + sentences into the AdaptiveMtDataset. + + Returns: + Callable[[~.ImportAdaptiveMtFileRequest], + Awaitable[~.ImportAdaptiveMtFileResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_adaptive_mt_file" not in self._stubs: + self._stubs["import_adaptive_mt_file"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ImportAdaptiveMtFile", + request_serializer=adaptive_mt.ImportAdaptiveMtFileRequest.serialize, + response_deserializer=adaptive_mt.ImportAdaptiveMtFileResponse.deserialize, + ) + return self._stubs["import_adaptive_mt_file"] + + @property + def list_adaptive_mt_files( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtFilesRequest], + Awaitable[adaptive_mt.ListAdaptiveMtFilesResponse], + ]: + r"""Return a callable for the list adaptive mt files method over gRPC. + + Lists all AdaptiveMtFiles associated to an + AdaptiveMtDataset. + + Returns: + Callable[[~.ListAdaptiveMtFilesRequest], + Awaitable[~.ListAdaptiveMtFilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_adaptive_mt_files" not in self._stubs: + self._stubs["list_adaptive_mt_files"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListAdaptiveMtFiles", + request_serializer=adaptive_mt.ListAdaptiveMtFilesRequest.serialize, + response_deserializer=adaptive_mt.ListAdaptiveMtFilesResponse.deserialize, + ) + return self._stubs["list_adaptive_mt_files"] + + @property + def list_adaptive_mt_sentences( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtSentencesRequest], + Awaitable[adaptive_mt.ListAdaptiveMtSentencesResponse], + ]: + r"""Return a callable for the list adaptive mt sentences method over gRPC. + + Lists all AdaptiveMtSentences under a given + file/dataset. + + Returns: + Callable[[~.ListAdaptiveMtSentencesRequest], + Awaitable[~.ListAdaptiveMtSentencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_adaptive_mt_sentences" not in self._stubs: + self._stubs["list_adaptive_mt_sentences"] = self.grpc_channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListAdaptiveMtSentences", + request_serializer=adaptive_mt.ListAdaptiveMtSentencesRequest.serialize, + response_deserializer=adaptive_mt.ListAdaptiveMtSentencesResponse.deserialize, + ) + return self._stubs["list_adaptive_mt_sentences"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py index b57ebb081755..e00021084f35 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/services/translation_service/transports/rest.py @@ -20,9 +20,6 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -import grpc # type: ignore -from requests import __version__ as requests_version - from google.api_core import ( gapic_v1, operations_v1, @@ -36,6 +33,8 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -43,8 +42,10 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.cloud.translate_v3.types import translation_service from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.translate_v3.types import adaptive_mt, translation_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import 
TranslationServiceTransport @@ -71,6 +72,14 @@ class TranslationServiceRestInterceptor: .. code-block:: python class MyCustomTranslationServiceInterceptor(TranslationServiceRestInterceptor): + def pre_adaptive_mt_translate(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_adaptive_mt_translate(self, response): + logging.log(f"Received response: {response}") + return response + def pre_batch_translate_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -87,6 +96,14 @@ def post_batch_translate_text(self, response): logging.log(f"Received response: {response}") return response + def pre_create_adaptive_mt_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_adaptive_mt_dataset(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_glossary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -95,6 +112,14 @@ def post_create_glossary(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_adaptive_mt_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_adaptive_mt_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_delete_glossary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -111,6 +136,22 @@ def post_detect_language(self, response): logging.log(f"Received response: {response}") return response + def pre_get_adaptive_mt_dataset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_adaptive_mt_dataset(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_get_adaptive_mt_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_adaptive_mt_file(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_glossary(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +168,38 @@ def post_get_supported_languages(self, response): logging.log(f"Received response: {response}") return response + def pre_import_adaptive_mt_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_adaptive_mt_file(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_adaptive_mt_datasets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_adaptive_mt_datasets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_adaptive_mt_files(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_adaptive_mt_files(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_adaptive_mt_sentences(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_adaptive_mt_sentences(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_glossaries(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -157,6 +230,29 @@ def post_translate_text(self, response): """ + def pre_adaptive_mt_translate( + self, + request: adaptive_mt.AdaptiveMtTranslateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.AdaptiveMtTranslateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for adaptive_mt_translate + + Override in a subclass to 
manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_adaptive_mt_translate( + self, response: adaptive_mt.AdaptiveMtTranslateResponse + ) -> adaptive_mt.AdaptiveMtTranslateResponse: + """Post-rpc interceptor for adaptive_mt_translate + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_batch_translate_document( self, request: translation_service.BatchTranslateDocumentRequest, @@ -207,6 +303,29 @@ def post_batch_translate_text( """ return response + def pre_create_adaptive_mt_dataset( + self, + request: adaptive_mt.CreateAdaptiveMtDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.CreateAdaptiveMtDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_adaptive_mt_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_create_adaptive_mt_dataset( + self, response: adaptive_mt.AdaptiveMtDataset + ) -> adaptive_mt.AdaptiveMtDataset: + """Post-rpc interceptor for create_adaptive_mt_dataset + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + def pre_create_glossary( self, request: translation_service.CreateGlossaryRequest, @@ -230,6 +349,30 @@ def post_create_glossary( """ return response + def pre_delete_adaptive_mt_dataset( + self, + request: adaptive_mt.DeleteAdaptiveMtDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.DeleteAdaptiveMtDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_adaptive_mt_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def pre_delete_adaptive_mt_file( + self, + request: adaptive_mt.DeleteAdaptiveMtFileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.DeleteAdaptiveMtFileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_adaptive_mt_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + def pre_delete_glossary( self, request: translation_service.DeleteGlossaryRequest, @@ -276,6 +419,52 @@ def post_detect_language( """ return response + def pre_get_adaptive_mt_dataset( + self, + request: adaptive_mt.GetAdaptiveMtDatasetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.GetAdaptiveMtDatasetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_adaptive_mt_dataset + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_get_adaptive_mt_dataset( + self, response: adaptive_mt.AdaptiveMtDataset + ) -> adaptive_mt.AdaptiveMtDataset: + """Post-rpc interceptor for get_adaptive_mt_dataset + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_adaptive_mt_file( + self, + request: adaptive_mt.GetAdaptiveMtFileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.GetAdaptiveMtFileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_adaptive_mt_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_get_adaptive_mt_file( + self, response: adaptive_mt.AdaptiveMtFile + ) -> adaptive_mt.AdaptiveMtFile: + """Post-rpc interceptor for get_adaptive_mt_file + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_get_glossary( self, request: translation_service.GetGlossaryRequest, @@ -324,6 +513,98 @@ def post_get_supported_languages( """ return response + def pre_import_adaptive_mt_file( + self, + request: adaptive_mt.ImportAdaptiveMtFileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.ImportAdaptiveMtFileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_adaptive_mt_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_import_adaptive_mt_file( + self, response: adaptive_mt.ImportAdaptiveMtFileResponse + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + """Post-rpc interceptor for import_adaptive_mt_file + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_adaptive_mt_datasets( + self, + request: adaptive_mt.ListAdaptiveMtDatasetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.ListAdaptiveMtDatasetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_adaptive_mt_datasets + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_adaptive_mt_datasets( + self, response: adaptive_mt.ListAdaptiveMtDatasetsResponse + ) -> adaptive_mt.ListAdaptiveMtDatasetsResponse: + """Post-rpc interceptor for list_adaptive_mt_datasets + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + + def pre_list_adaptive_mt_files( + self, + request: adaptive_mt.ListAdaptiveMtFilesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.ListAdaptiveMtFilesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_adaptive_mt_files + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_adaptive_mt_files( + self, response: adaptive_mt.ListAdaptiveMtFilesResponse + ) -> adaptive_mt.ListAdaptiveMtFilesResponse: + """Post-rpc interceptor for list_adaptive_mt_files + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_adaptive_mt_sentences( + self, + request: adaptive_mt.ListAdaptiveMtSentencesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[adaptive_mt.ListAdaptiveMtSentencesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_adaptive_mt_sentences + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_list_adaptive_mt_sentences( + self, response: adaptive_mt.ListAdaptiveMtSentencesResponse + ) -> adaptive_mt.ListAdaptiveMtSentencesResponse: + """Post-rpc interceptor for list_adaptive_mt_sentences + + Override in a subclass to manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. + """ + return response + def pre_list_glossaries( self, request: translation_service.ListGlossariesRequest, @@ -551,9 +832,9 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client - class _BatchTranslateDocument(TranslationServiceRestStub): + class _AdaptiveMtTranslate(TranslationServiceRestStub): def __hash__(self): - return hash("BatchTranslateDocument") + return hash("AdaptiveMtTranslate") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -567,17 +848,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.BatchTranslateDocumentRequest, + request: adaptive_mt.AdaptiveMtTranslateRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the batch translate document method over HTTP. + ) -> adaptive_mt.AdaptiveMtTranslateResponse: + r"""Call the adaptive mt translate method over HTTP. Args: - request (~.translation_service.BatchTranslateDocumentRequest): - The request object. 
The BatchTranslateDocument request. + request (~.adaptive_mt.AdaptiveMtTranslateRequest): + The request object. The request for sending an AdaptiveMt + translation query. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -585,24 +867,21 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - + ~.adaptive_mt.AdaptiveMtTranslateResponse: + An AdaptiveMtTranslate response. """ http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}:batchTranslateDocument", + "uri": "/v3/{parent=projects/*/locations/*}:adaptiveMtTranslate", "body": "*", }, ] - request, metadata = self._interceptor.pre_batch_translate_document( + request, metadata = self._interceptor.pre_adaptive_mt_translate( request, metadata ) - pb_request = translation_service.BatchTranslateDocumentRequest.pb(request) + pb_request = adaptive_mt.AdaptiveMtTranslateRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -644,14 +923,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_translate_document(resp) + resp = adaptive_mt.AdaptiveMtTranslateResponse() + pb_resp = adaptive_mt.AdaptiveMtTranslateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_adaptive_mt_translate(resp) return resp - class _BatchTranslateText(TranslationServiceRestStub): + class _BatchTranslateDocument(TranslationServiceRestStub): def __hash__(self): - return hash("BatchTranslateText") + return hash("BatchTranslateDocument") 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -665,17 +946,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.BatchTranslateTextRequest, + request: translation_service.BatchTranslateDocumentRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the batch translate text method over HTTP. + r"""Call the batch translate document method over HTTP. Args: - request (~.translation_service.BatchTranslateTextRequest): - The request object. The batch translation request. + request (~.translation_service.BatchTranslateDocumentRequest): + The request object. The BatchTranslateDocument request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -693,23 +974,926 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}:batchTranslateText", + "uri": "/v3/{parent=projects/*/locations/*}:batchTranslateDocument", "body": "*", }, ] - request, metadata = self._interceptor.pre_batch_translate_text( - request, metadata - ) - pb_request = translation_service.BatchTranslateTextRequest.pb(request) + request, metadata = self._interceptor.pre_batch_translate_document( + request, metadata + ) + pb_request = translation_service.BatchTranslateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + 
use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_translate_document(resp) + return resp + + class _BatchTranslateText(TranslationServiceRestStub): + def __hash__(self): + return hash("BatchTranslateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.BatchTranslateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch translate text method over HTTP. + + Args: + request (~.translation_service.BatchTranslateTextRequest): + The request object. The batch translation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}:batchTranslateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_translate_text( + request, metadata + ) + pb_request = translation_service.BatchTranslateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_translate_text(resp) + return resp + + class _CreateAdaptiveMtDataset(TranslationServiceRestStub): + def __hash__(self): + return hash("CreateAdaptiveMtDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.CreateAdaptiveMtDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Call the create adaptive mt + dataset method over HTTP. + + Args: + request (~.adaptive_mt.CreateAdaptiveMtDatasetRequest): + The request object. Request message for creating an + AdaptiveMtDataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.AdaptiveMtDataset: + An Adaptive MT Dataset. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets", + "body": "adaptive_mt_dataset", + }, + ] + request, metadata = self._interceptor.pre_create_adaptive_mt_dataset( + request, metadata + ) + pb_request = adaptive_mt.CreateAdaptiveMtDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.AdaptiveMtDataset() + pb_resp = adaptive_mt.AdaptiveMtDataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_adaptive_mt_dataset(resp) + return resp + + class _CreateGlossary(TranslationServiceRestStub): + def __hash__(self): + return hash("CreateGlossary") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.CreateGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create glossary method over HTTP. + + Args: + request (~.translation_service.CreateGlossaryRequest): + The request object. Request message for CreateGlossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}/glossaries", + "body": "glossary", + }, + ] + request, metadata = self._interceptor.pre_create_glossary(request, metadata) + pb_request = translation_service.CreateGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_glossary(resp) + return resp + + class _DeleteAdaptiveMtDataset(TranslationServiceRestStub): + def __hash__(self): + return hash("DeleteAdaptiveMtDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.DeleteAdaptiveMtDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete adaptive mt + dataset method over HTTP. + + Args: + request (~.adaptive_mt.DeleteAdaptiveMtDatasetRequest): + The request object. Request message for deleting an + AdaptiveMtDataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_adaptive_mt_dataset( + request, metadata + ) + pb_request = adaptive_mt.DeleteAdaptiveMtDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteAdaptiveMtFile(TranslationServiceRestStub): + def __hash__(self): + return hash("DeleteAdaptiveMtFile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.DeleteAdaptiveMtFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete adaptive mt file method over HTTP. 
+ + Args: + request (~.adaptive_mt.DeleteAdaptiveMtFileRequest): + The request object. The request for deleting an + AdaptiveMt file. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_adaptive_mt_file( + request, metadata + ) + pb_request = adaptive_mt.DeleteAdaptiveMtFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteGlossary(TranslationServiceRestStub): + def __hash__(self): + return hash("DeleteGlossary") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.DeleteGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete glossary method over HTTP. + + Args: + request (~.translation_service.DeleteGlossaryRequest): + The request object. Request message for DeleteGlossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_glossary(request, metadata) + pb_request = translation_service.DeleteGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_glossary(resp) + return resp + + class _DetectLanguage(TranslationServiceRestStub): + def __hash__(self): + return hash("DetectLanguage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.DetectLanguageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.DetectLanguageResponse: + r"""Call the detect language method over HTTP. + + Args: + request (~.translation_service.DetectLanguageRequest): + The request object. The request message for language + detection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.translation_service.DetectLanguageResponse: + The response message for language + detection. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}:detectLanguage", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{parent=projects/*}:detectLanguage", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_detect_language(request, metadata) + pb_request = translation_service.DetectLanguageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.DetectLanguageResponse() + pb_resp = translation_service.DetectLanguageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detect_language(resp) + return resp + + class _GetAdaptiveMtDataset(TranslationServiceRestStub): + def __hash__(self): + return hash("GetAdaptiveMtDataset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.GetAdaptiveMtDatasetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtDataset: + r"""Call the get adaptive mt dataset method over HTTP. + + Args: + request (~.adaptive_mt.GetAdaptiveMtDatasetRequest): + The request object. Request message for getting an + Adaptive MT dataset. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.AdaptiveMtDataset: + An Adaptive MT Dataset. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_adaptive_mt_dataset( + request, metadata + ) + pb_request = adaptive_mt.GetAdaptiveMtDatasetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.AdaptiveMtDataset() + pb_resp = adaptive_mt.AdaptiveMtDataset.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_adaptive_mt_dataset(resp) + return resp + + class _GetAdaptiveMtFile(TranslationServiceRestStub): + def __hash__(self): + return hash("GetAdaptiveMtFile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: adaptive_mt.GetAdaptiveMtFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> adaptive_mt.AdaptiveMtFile: + r"""Call the get adaptive mt file method over HTTP. + + Args: + request (~.adaptive_mt.GetAdaptiveMtFileRequest): + The request object. The request for getting an + AdaptiveMtFile. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.AdaptiveMtFile: + An AdaptiveMtFile. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}", + }, + ] + request, metadata = self._interceptor.pre_get_adaptive_mt_file( + request, metadata + ) + pb_request = adaptive_mt.GetAdaptiveMtFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = adaptive_mt.AdaptiveMtFile() + pb_resp = adaptive_mt.AdaptiveMtFile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_adaptive_mt_file(resp) + return resp + + class _GetGlossary(TranslationServiceRestStub): + def __hash__(self): + return hash("GetGlossary") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: translation_service.GetGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.Glossary: + r"""Call the get glossary method over HTTP. + + Args: + request (~.translation_service.GetGlossaryRequest): + The request object. Request message for GetGlossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.translation_service.Glossary: + Represents a glossary built from + user-provided data. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", + }, + ] + request, metadata = self._interceptor.pre_get_glossary(request, metadata) + pb_request = translation_service.GetGlossaryRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -733,7 +1917,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -742,14 +1925,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_translate_text(resp) + resp = translation_service.Glossary() + pb_resp = translation_service.Glossary.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_glossary(resp) return resp - class _CreateGlossary(TranslationServiceRestStub): + class _GetSupportedLanguages(TranslationServiceRestStub): def __hash__(self): - return hash("CreateGlossary") + return hash("GetSupportedLanguages") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -763,17 +1948,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.CreateGlossaryRequest, + request: translation_service.GetSupportedLanguagesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create 
glossary method over HTTP. + ) -> translation_service.SupportedLanguages: + r"""Call the get supported languages method over HTTP. Args: - request (~.translation_service.CreateGlossaryRequest): - The request object. Request message for CreateGlossary. + request (~.translation_service.GetSupportedLanguagesRequest): + The request object. The request message for discovering + supported languages. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -781,31 +1967,28 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.translation_service.SupportedLanguages: + The response message for discovering + supported languages. """ http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}/glossaries", - "body": "glossary", + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/supportedLanguages", + }, + { + "method": "get", + "uri": "/v3/{parent=projects/*}/supportedLanguages", }, ] - request, metadata = self._interceptor.pre_create_glossary(request, metadata) - pb_request = translation_service.CreateGlossaryRequest.pb(request) + request, metadata = self._interceptor.pre_get_supported_languages( + request, metadata + ) + pb_request = translation_service.GetSupportedLanguagesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -829,7 +2012,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception @@ -838,14 +2020,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_glossary(resp) + resp = translation_service.SupportedLanguages() + pb_resp = translation_service.SupportedLanguages.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_supported_languages(resp) return resp - class _DeleteGlossary(TranslationServiceRestStub): + class _ImportAdaptiveMtFile(TranslationServiceRestStub): def __hash__(self): - return hash("DeleteGlossary") + return hash("ImportAdaptiveMtFile") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -859,17 +2043,19 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.DeleteGlossaryRequest, + request: adaptive_mt.ImportAdaptiveMtFileRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete glossary method over HTTP. + ) -> adaptive_mt.ImportAdaptiveMtFileResponse: + r"""Call the import adaptive mt file method over HTTP. Args: - request (~.translation_service.DeleteGlossaryRequest): - The request object. Request message for DeleteGlossary. + request (~.adaptive_mt.ImportAdaptiveMtFileRequest): + The request object. The request for importing an + AdaptiveMt file along with its + sentences. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -877,23 +2063,32 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ ~.adaptive_mt.ImportAdaptiveMtFileResponse: + The response for importing an + AdaptiveMtFile """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}:importAdaptiveMtFile", + "body": "*", }, ] - request, metadata = self._interceptor.pre_delete_glossary(request, metadata) - pb_request = translation_service.DeleteGlossaryRequest.pb(request) + request, metadata = self._interceptor.pre_import_adaptive_mt_file( + request, metadata + ) + pb_request = adaptive_mt.ImportAdaptiveMtFileRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -917,6 +2112,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -925,14 +2121,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_glossary(resp) + resp = adaptive_mt.ImportAdaptiveMtFileResponse() + pb_resp = adaptive_mt.ImportAdaptiveMtFileResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_adaptive_mt_file(resp) return resp - class _DetectLanguage(TranslationServiceRestStub): + class _ListAdaptiveMtDatasets(TranslationServiceRestStub): def __hash__(self): - return hash("DetectLanguage") + return hash("ListAdaptiveMtDatasets") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = {} @@ -946,18 +2144,19 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.DetectLanguageRequest, + request: adaptive_mt.ListAdaptiveMtDatasetsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.DetectLanguageResponse: - r"""Call the detect language method over HTTP. + ) -> adaptive_mt.ListAdaptiveMtDatasetsResponse: + r"""Call the list adaptive mt datasets method over HTTP. Args: - request (~.translation_service.DetectLanguageRequest): - The request object. The request message for language - detection. + request (~.adaptive_mt.ListAdaptiveMtDatasetsRequest): + The request object. Request message for listing all + Adaptive MT datasets that the requestor + has access to. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -965,35 +2164,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.translation_service.DetectLanguageResponse: - The response message for language - detection. - + ~.adaptive_mt.ListAdaptiveMtDatasetsResponse: + A list of AdaptiveMtDatasets. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v3/{parent=projects/*/locations/*}:detectLanguage", - "body": "*", - }, - { - "method": "post", - "uri": "/v3/{parent=projects/*}:detectLanguage", - "body": "*", + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets", }, ] - request, metadata = self._interceptor.pre_detect_language(request, metadata) - pb_request = translation_service.DetectLanguageRequest.pb(request) + request, metadata = self._interceptor.pre_list_adaptive_mt_datasets( + request, metadata + ) + pb_request = adaptive_mt.ListAdaptiveMtDatasetsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1017,7 +2203,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1026,16 +2211,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.DetectLanguageResponse() - pb_resp = translation_service.DetectLanguageResponse.pb(resp) + resp = adaptive_mt.ListAdaptiveMtDatasetsResponse() + pb_resp = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_detect_language(resp) + resp = self._interceptor.post_list_adaptive_mt_datasets(resp) return resp - class _GetGlossary(TranslationServiceRestStub): + class _ListAdaptiveMtFiles(TranslationServiceRestStub): def __hash__(self): - return hash("GetGlossary") + return hash("ListAdaptiveMtFiles") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ 
-1049,17 +2234,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.GetGlossaryRequest, + request: adaptive_mt.ListAdaptiveMtFilesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.Glossary: - r"""Call the get glossary method over HTTP. + ) -> adaptive_mt.ListAdaptiveMtFilesResponse: + r"""Call the list adaptive mt files method over HTTP. Args: - request (~.translation_service.GetGlossaryRequest): - The request object. Request message for GetGlossary. + request (~.adaptive_mt.ListAdaptiveMtFilesRequest): + The request object. The request to list all AdaptiveMt + files under a given dataset. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1067,20 +2253,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.translation_service.Glossary: - Represents a glossary built from - user-provided data. + ~.adaptive_mt.ListAdaptiveMtFilesResponse: + The response for listing all + AdaptiveMt files under a given dataset. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v3/{name=projects/*/locations/*/glossaries/*}", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtFiles", }, ] - request, metadata = self._interceptor.pre_get_glossary(request, metadata) - pb_request = translation_service.GetGlossaryRequest.pb(request) + request, metadata = self._interceptor.pre_list_adaptive_mt_files( + request, metadata + ) + pb_request = adaptive_mt.ListAdaptiveMtFilesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1114,16 +2302,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.Glossary() - pb_resp = translation_service.Glossary.pb(resp) + resp = adaptive_mt.ListAdaptiveMtFilesResponse() + pb_resp = adaptive_mt.ListAdaptiveMtFilesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_glossary(resp) + resp = self._interceptor.post_list_adaptive_mt_files(resp) return resp - class _GetSupportedLanguages(TranslationServiceRestStub): + class _ListAdaptiveMtSentences(TranslationServiceRestStub): def __hash__(self): - return hash("GetSupportedLanguages") + return hash("ListAdaptiveMtSentences") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1137,45 +2325,44 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: translation_service.GetSupportedLanguagesRequest, + request: adaptive_mt.ListAdaptiveMtSentencesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> translation_service.SupportedLanguages: - r"""Call the get supported languages method over HTTP. - - Args: - request (~.translation_service.GetSupportedLanguagesRequest): - The request object. The request message for discovering - supported languages. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.translation_service.SupportedLanguages: - The response message for discovering - supported languages. - + ) -> adaptive_mt.ListAdaptiveMtSentencesResponse: + r"""Call the list adaptive mt + sentences method over HTTP. + + Args: + request (~.adaptive_mt.ListAdaptiveMtSentencesRequest): + The request object. The request for listing Adaptive MT + sentences from a Dataset/File. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.adaptive_mt.ListAdaptiveMtSentencesResponse: + List AdaptiveMt sentences response. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v3/{parent=projects/*/locations/*}/supportedLanguages", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}/adaptiveMtSentences", }, { "method": "get", - "uri": "/v3/{parent=projects/*}/supportedLanguages", + "uri": "/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtSentences", }, ] - request, metadata = self._interceptor.pre_get_supported_languages( + request, metadata = self._interceptor.pre_list_adaptive_mt_sentences( request, metadata ) - pb_request = translation_service.GetSupportedLanguagesRequest.pb(request) + pb_request = adaptive_mt.ListAdaptiveMtSentencesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1209,11 +2396,11 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = translation_service.SupportedLanguages() - pb_resp = 
translation_service.SupportedLanguages.pb(resp) + resp = adaptive_mt.ListAdaptiveMtSentencesResponse() + pb_resp = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_supported_languages(resp) + resp = self._interceptor.post_list_adaptive_mt_sentences(resp) return resp class _ListGlossaries(TranslationServiceRestStub): @@ -1502,6 +2689,17 @@ def __call__( resp = self._interceptor.post_translate_text(resp) return resp + @property + def adaptive_mt_translate( + self, + ) -> Callable[ + [adaptive_mt.AdaptiveMtTranslateRequest], + adaptive_mt.AdaptiveMtTranslateResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AdaptiveMtTranslate(self._session, self._host, self._interceptor) # type: ignore + @property def batch_translate_document( self, @@ -1522,6 +2720,16 @@ def batch_translate_text( # In C++ this would require a dynamic_cast return self._BatchTranslateText(self._session, self._host, self._interceptor) # type: ignore + @property + def create_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.CreateAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + @property def create_glossary( self, @@ -1532,6 +2740,22 @@ def create_glossary( # In C++ this would require a dynamic_cast return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_adaptive_mt_dataset( + self, + ) -> Callable[[adaptive_mt.DeleteAdaptiveMtDatasetRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_adaptive_mt_file( + self, + ) -> Callable[[adaptive_mt.DeleteAdaptiveMtFileRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + @property def delete_glossary( self, @@ -1553,6 +2777,24 @@ def detect_language( # In C++ this would require a dynamic_cast return self._DetectLanguage(self._session, self._host, self._interceptor) # type: ignore + @property + def get_adaptive_mt_dataset( + self, + ) -> Callable[ + [adaptive_mt.GetAdaptiveMtDatasetRequest], adaptive_mt.AdaptiveMtDataset + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAdaptiveMtDataset(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_adaptive_mt_file( + self, + ) -> Callable[[adaptive_mt.GetAdaptiveMtFileRequest], adaptive_mt.AdaptiveMtFile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + @property def get_glossary( self, @@ -1574,6 +2816,50 @@ def get_supported_languages( # In C++ this would require a dynamic_cast return self._GetSupportedLanguages(self._session, self._host, self._interceptor) # type: ignore + @property + def import_adaptive_mt_file( + self, + ) -> Callable[ + [adaptive_mt.ImportAdaptiveMtFileRequest], + adaptive_mt.ImportAdaptiveMtFileResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportAdaptiveMtFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_adaptive_mt_datasets( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtDatasetsRequest], + adaptive_mt.ListAdaptiveMtDatasetsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAdaptiveMtDatasets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_adaptive_mt_files( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtFilesRequest], + adaptive_mt.ListAdaptiveMtFilesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAdaptiveMtFiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_adaptive_mt_sentences( + self, + ) -> Callable[ + [adaptive_mt.ListAdaptiveMtSentencesRequest], + adaptive_mt.ListAdaptiveMtSentencesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAdaptiveMtSentences(self._session, self._host, self._interceptor) # type: ignore + @property def list_glossaries( self, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py index 3a700ac0d8cb..64c97da095f6 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/__init__.py @@ -13,6 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .adaptive_mt import ( + AdaptiveMtDataset, + AdaptiveMtFile, + AdaptiveMtSentence, + AdaptiveMtTranslateRequest, + AdaptiveMtTranslateResponse, + AdaptiveMtTranslation, + CreateAdaptiveMtDatasetRequest, + DeleteAdaptiveMtDatasetRequest, + DeleteAdaptiveMtFileRequest, + GetAdaptiveMtDatasetRequest, + GetAdaptiveMtFileRequest, + ImportAdaptiveMtFileRequest, + ImportAdaptiveMtFileResponse, + ListAdaptiveMtDatasetsRequest, + ListAdaptiveMtDatasetsResponse, + ListAdaptiveMtFilesRequest, + ListAdaptiveMtFilesResponse, + ListAdaptiveMtSentencesRequest, + ListAdaptiveMtSentencesResponse, +) +from .common import FileInputSource, GcsInputSource, GcsOutputDestination from .translation_service import ( BatchDocumentInputConfig, BatchDocumentOutputConfig, @@ -54,6 +76,28 @@ ) __all__ = ( + "AdaptiveMtDataset", + "AdaptiveMtFile", + "AdaptiveMtSentence", + "AdaptiveMtTranslateRequest", + "AdaptiveMtTranslateResponse", + "AdaptiveMtTranslation", + "CreateAdaptiveMtDatasetRequest", + "DeleteAdaptiveMtDatasetRequest", + "DeleteAdaptiveMtFileRequest", + "GetAdaptiveMtDatasetRequest", + "GetAdaptiveMtFileRequest", + "ImportAdaptiveMtFileRequest", + "ImportAdaptiveMtFileResponse", + "ListAdaptiveMtDatasetsRequest", + "ListAdaptiveMtDatasetsResponse", + "ListAdaptiveMtFilesRequest", + 
"ListAdaptiveMtFilesResponse", + "ListAdaptiveMtSentencesRequest", + "ListAdaptiveMtSentencesResponse", + "FileInputSource", + "GcsInputSource", + "GcsOutputDestination", "BatchDocumentInputConfig", "BatchDocumentOutputConfig", "BatchTranslateDocumentMetadata", diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py new file mode 100644 index 000000000000..d98f649cfd49 --- /dev/null +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py @@ -0,0 +1,606 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.translate_v3.types import common + +__protobuf__ = proto.module( + package="google.cloud.translation.v3", + manifest={ + "AdaptiveMtDataset", + "CreateAdaptiveMtDatasetRequest", + "DeleteAdaptiveMtDatasetRequest", + "GetAdaptiveMtDatasetRequest", + "ListAdaptiveMtDatasetsRequest", + "ListAdaptiveMtDatasetsResponse", + "AdaptiveMtTranslateRequest", + "AdaptiveMtTranslation", + "AdaptiveMtTranslateResponse", + "AdaptiveMtFile", + "GetAdaptiveMtFileRequest", + "DeleteAdaptiveMtFileRequest", + "ImportAdaptiveMtFileRequest", + "ImportAdaptiveMtFileResponse", + "ListAdaptiveMtFilesRequest", + "ListAdaptiveMtFilesResponse", + "AdaptiveMtSentence", + "ListAdaptiveMtSentencesRequest", + "ListAdaptiveMtSentencesResponse", + }, +) + + +class AdaptiveMtDataset(proto.Message): + r"""An Adaptive MT Dataset. + + Attributes: + name (str): + Required. The resource name of the dataset, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset_id}`` + display_name (str): + The name of the dataset to show in the interface. The name + can be up to 32 characters long and can consist only of + ASCII Latin letters A-Z and a-z, underscores (_), and ASCII + digits 0-9. + source_language_code (str): + The BCP-47 language code of the source + language. + target_language_code (str): + The BCP-47 language code of the target + language. + example_count (int): + The number of examples in the dataset. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this dataset was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this dataset was + last updated. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + source_language_code: str = proto.Field( + proto.STRING, + number=3, + ) + target_language_code: str = proto.Field( + proto.STRING, + number=4, + ) + example_count: int = proto.Field( + proto.INT32, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + + +class CreateAdaptiveMtDatasetRequest(proto.Message): + r"""Request message for creating an AdaptiveMtDataset. + + Attributes: + parent (str): + Required. Name of the parent project. In form of + ``projects/{project-number-or-id}/locations/{location-id}`` + adaptive_mt_dataset (google.cloud.translate_v3.types.AdaptiveMtDataset): + Required. The AdaptiveMtDataset to be + created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + adaptive_mt_dataset: "AdaptiveMtDataset" = proto.Field( + proto.MESSAGE, + number=2, + message="AdaptiveMtDataset", + ) + + +class DeleteAdaptiveMtDatasetRequest(proto.Message): + r"""Request message for deleting an AdaptiveMtDataset. + + Attributes: + name (str): + Required. Name of the dataset. In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetAdaptiveMtDatasetRequest(proto.Message): + r"""Request message for getting an Adaptive MT dataset. + + Attributes: + name (str): + Required. Name of the dataset. 
In the form of + ``projects/{project-number-or-id}/locations/{location-id}/adaptiveMtDatasets/{adaptive-mt-dataset-id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAdaptiveMtDatasetsRequest(proto.Message): + r"""Request message for listing all Adaptive MT datasets that the + requestor has access to. + + Attributes: + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT datasets. + ``projects/{project-number-or-id}/locations/{location-id}`` + page_size (int): + Optional. Requested page size. The server may + return fewer results than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A token identifying a page of results the server + should return. Typically, this is the value of + ListAdaptiveMtDatasetsResponse.next_page_token returned from + the previous call to ``ListAdaptiveMtDatasets`` method. The + first page is returned if ``page_token``\ is empty or + missing. + filter (str): + Optional. An expression for filtering the + results of the request. Filter is not supported + yet. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAdaptiveMtDatasetsResponse(proto.Message): + r"""A list of AdaptiveMtDatasets. + + Attributes: + adaptive_mt_datasets (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtDataset]): + Output only. A list of Adaptive MT datasets. + next_page_token (str): + Optional. A token to retrieve a page of results. Pass this + value in the [ListAdaptiveMtDatasetsRequest.page_token] + field in the subsequent call to ``ListAdaptiveMtDatasets`` + method to retrieve the next page of results. 
+ """ + + @property + def raw_page(self): + return self + + adaptive_mt_datasets: MutableSequence["AdaptiveMtDataset"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AdaptiveMtDataset", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AdaptiveMtTranslateRequest(proto.Message): + r"""The request for sending an AdaptiveMt translation query. + + Attributes: + parent (str): + Required. Location to make a regional call. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}``. + dataset (str): + Required. The resource name for the dataset to use for + adaptive MT. + ``projects/{project}/locations/{location-id}/adaptiveMtDatasets/{dataset}`` + content (MutableSequence[str]): + Required. The content of the input in string + format. For now only one sentence per request is + supported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dataset: str = proto.Field( + proto.STRING, + number=2, + ) + content: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class AdaptiveMtTranslation(proto.Message): + r"""An AdaptiveMt translation. + + Attributes: + translated_text (str): + Output only. The translated text. + """ + + translated_text: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AdaptiveMtTranslateResponse(proto.Message): + r"""An AdaptiveMtTranslate response. + + Attributes: + translations (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtTranslation]): + Output only. The translation. + language_code (str): + Output only. The translation's language code. + """ + + translations: MutableSequence["AdaptiveMtTranslation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AdaptiveMtTranslation", + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AdaptiveMtFile(proto.Message): + r"""An AdaptiveMtFile. + + Attributes: + name (str): + Required. 
The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + display_name (str): + The file's display name. + entry_count (int): + The number of entries that the file contains. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this file was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this file was + last updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + entry_count: int = proto.Field( + proto.INT32, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class GetAdaptiveMtFileRequest(proto.Message): + r"""The request for getting an AdaptiveMtFile. + + Attributes: + name (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteAdaptiveMtFileRequest(proto.Message): + r"""The request for deleting an AdaptiveMt file. + + Attributes: + name (str): + Required. The resource name of the file to delete, in form + of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportAdaptiveMtFileRequest(proto.Message): + r"""The request for importing an AdaptiveMt file along with its + sentences. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}`` + file_input_source (google.cloud.translate_v3.types.FileInputSource): + Inline file source. + + This field is a member of `oneof`_ ``source``. + gcs_input_source (google.cloud.translate_v3.types.GcsInputSource): + Google Cloud Storage file source. + + This field is a member of `oneof`_ ``source``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + file_input_source: common.FileInputSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=common.FileInputSource, + ) + gcs_input_source: common.GcsInputSource = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message=common.GcsInputSource, + ) + + +class ImportAdaptiveMtFileResponse(proto.Message): + r"""The response for importing an AdaptiveMtFile + + Attributes: + adaptive_mt_file (google.cloud.translate_v3.types.AdaptiveMtFile): + Output only. The Adaptive MT file that was + imported. + """ + + adaptive_mt_file: "AdaptiveMtFile" = proto.Field( + proto.MESSAGE, + number=1, + message="AdaptiveMtFile", + ) + + +class ListAdaptiveMtFilesRequest(proto.Message): + r"""The request to list all AdaptiveMt files under a given + dataset. + + Attributes: + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT files. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + page_size (int): + Optional. + page_token (str): + Optional. A token identifying a page of results the server + should return. Typically, this is the value of + ListAdaptiveMtFilesResponse.next_page_token returned from + the previous call to ``ListAdaptiveMtFiles`` method. 
The + first page is returned if ``page_token``\ is empty or + missing. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAdaptiveMtFilesResponse(proto.Message): + r"""The response for listing all AdaptiveMt files under a given + dataset. + + Attributes: + adaptive_mt_files (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtFile]): + Output only. The Adaptive MT files. + next_page_token (str): + Optional. A token to retrieve a page of results. Pass this + value in the ListAdaptiveMtFilesRequest.page_token field in + the subsequent call to ``ListAdaptiveMtFiles`` method to + retrieve the next page of results. + """ + + @property + def raw_page(self): + return self + + adaptive_mt_files: MutableSequence["AdaptiveMtFile"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AdaptiveMtFile", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AdaptiveMtSentence(proto.Message): + r"""An AdaptiveMt sentence entry. + + Attributes: + name (str): + Required. The resource name of the file, in form of + ``projects/{project-number-or-id}/locations/{location_id}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}/adaptiveMtSentences/{sentence}`` + source_sentence (str): + Required. The source sentence. + target_sentence (str): + Required. The target sentence. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this sentence was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when this sentence was + last updated. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_sentence: str = proto.Field( + proto.STRING, + number=2, + ) + target_sentence: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ListAdaptiveMtSentencesRequest(proto.Message): + r"""The request for listing Adaptive MT sentences from a + Dataset/File. + + Attributes: + parent (str): + Required. The resource name of the project from which to + list the Adaptive MT files. The following format lists all + sentences under a file. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}`` + The following format lists all sentences within a dataset. + ``projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}`` + page_size (int): + + page_token (str): + A token identifying a page of results the server should + return. Typically, this is the value of + ListAdaptiveMtSentencesRequest.next_page_token returned from + the previous call to ``ListTranslationMemories`` method. The + first page is returned if ``page_token`` is empty or + missing. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAdaptiveMtSentencesResponse(proto.Message): + r"""List AdaptiveMt sentences response. + + Attributes: + adaptive_mt_sentences (MutableSequence[google.cloud.translate_v3.types.AdaptiveMtSentence]): + Output only. The list of AdaptiveMtSentences. + next_page_token (str): + Optional. 
+ """ + + @property + def raw_page(self): + return self + + adaptive_mt_sentences: MutableSequence["AdaptiveMtSentence"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AdaptiveMtSentence", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py new file mode 100644 index 000000000000..e75192298bb5 --- /dev/null +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/common.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.translation.v3", + manifest={ + "GcsInputSource", + "FileInputSource", + "GcsOutputDestination", + }, +) + + +class GcsInputSource(proto.Message): + r"""The Google Cloud Storage location for the input content. + + Attributes: + input_uri (str): + Required. Source data URI. For example, + ``gs://my_bucket/my_object``. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FileInputSource(proto.Message): + r"""An inlined file. + + Attributes: + mime_type (str): + Required. The file's mime type. 
+ content (bytes): + Required. The file's byte contents. + display_name (str): + Required. The file's display name. + """ + + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + content: bytes = proto.Field( + proto.BYTES, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GcsOutputDestination(proto.Message): + r"""The Google Cloud Storage location for the output content. + + Attributes: + output_uri_prefix (str): + Required. Google Cloud Storage URI to output directory. For + example, ``gs://bucket/directory``. The requesting user must + have write permission to the bucket. The directory will be + created if it doesn't exist. + """ + + output_uri_prefix: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py index 160fc54741f5..8f6dbe4f36eb 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/translation_service.py @@ -17,9 +17,8 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.translation.v3", @@ -1801,9 +1800,9 @@ class BatchTranslateDocumentRequest(proto.Message): Optional. Glossaries to be applied. It's keyed by target language code. format_conversions (MutableMapping[str, str]): - Optional. File format conversion map to be applied to all - input files. Map's key is the original mime_type. Map's - value is the target mime_type of translated documents. + Optional. The file format conversion map that is applied to + all input files. The map key is the original mime_type. 
The + map value is the target mime_type of translated documents. Supported file format conversion includes: diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py index 82156347f3eb..cd6578cfc6b0 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.13.0" # {x-release-please-version} +__version__ = "3.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py index 5c4d17ec43f6..5c2a2207bce2 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py @@ -33,9 +33,10 @@ from google.api_core import retry_async as retries from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.translate_v3beta1 import gapic_version as package_version from google.oauth2 import service_account # type: ignore +from google.cloud.translate_v3beta1 import gapic_version as package_version + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -43,9 +44,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + from 
google.cloud.translate_v3beta1.services.translation_service import pagers from google.cloud.translate_v3beta1.types import translation_service -from google.protobuf import timestamp_pb2 # type: ignore from .client import TranslationServiceClient from .transports.base import DEFAULT_CLIENT_INFO, TranslationServiceTransport diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py index d9fa9bd56b12..afcaa170c225 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py @@ -37,9 +37,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.translate_v3beta1 import gapic_version as package_version from google.oauth2 import service_account # type: ignore +from google.cloud.translate_v3beta1 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -47,9 +48,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + from google.cloud.translate_v3beta1.services.translation_service import pagers from google.cloud.translate_v3beta1.types import translation_service -from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .transports.grpc import TranslationServiceGrpcTransport diff --git 
a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py index 3064d9b19609..7c9a5ad76a5f 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py @@ -22,11 +22,12 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.translate_v3beta1 import gapic_version as package_version -from google.cloud.translate_v3beta1.types import translation_service from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.translate_v3beta1 import gapic_version as package_version +from google.cloud.translate_v3beta1.types import translation_service + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py index 66ef7112a0f5..810eb99bbe47 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py @@ -16,14 +16,14 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -import grpc # type: ignore - from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from 
google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.translate_v3beta1.types import translation_service from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.translate_v3beta1.types import translation_service from .base import DEFAULT_CLIENT_INFO, TranslationServiceTransport diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py index 6e175a4b7c8d..373d3bb6c483 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py @@ -16,14 +16,14 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.translate_v3beta1.types import translation_service from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.translate_v3beta1.types import translation_service from .base import DEFAULT_CLIENT_INFO, TranslationServiceTransport from .grpc import TranslationServiceGrpcTransport diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py index 2fe8bbe126fe..688ea51356c0 100644 --- 
a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py @@ -20,9 +20,6 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -import grpc # type: ignore -from requests import __version__ as requests_version - from google.api_core import ( gapic_v1, operations_v1, @@ -36,6 +33,8 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -43,9 +42,10 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.cloud.translate_v3beta1.types import translation_service from google.longrunning import operations_pb2 # type: ignore +from google.cloud.translate_v3beta1.types import translation_service + from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import TranslationServiceTransport diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py index 3acbbdfc0004..0c66d71be0df 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py @@ -17,9 +17,8 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.translation.v3beta1", diff --git 
a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json index ecbacba53967..ccae7ff1089a 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3.json @@ -8,9 +8,178 @@ ], "language": "PYTHON", "name": "google-cloud-translate", - "version": "3.13.0" + "version": "3.14.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.adaptive_mt_translate", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.AdaptiveMtTranslate", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "AdaptiveMtTranslate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.AdaptiveMtTranslateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "content", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtTranslateResponse", + "shortName": "adaptive_mt_translate" + }, + "description": "Sample for AdaptiveMtTranslate", + "file": "translate_v3_generated_translation_service_adaptive_mt_translate_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_AdaptiveMtTranslate_async", + 
"segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_adaptive_mt_translate_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.adaptive_mt_translate", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.AdaptiveMtTranslate", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "AdaptiveMtTranslate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.AdaptiveMtTranslateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "content", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtTranslateResponse", + "shortName": "adaptive_mt_translate" + }, + "description": "Sample for AdaptiveMtTranslate", + "file": "translate_v3_generated_translation_service_adaptive_mt_translate_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_AdaptiveMtTranslate_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_adaptive_mt_translate_sync.py" + }, { "canonical": true, "clientMethod": { @@ -365,27 +534,27 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_adaptive_mt_dataset", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.CreateAdaptiveMtDataset", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "CreateGlossary" + "shortName": "CreateAdaptiveMtDataset" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" + "type": "google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest" }, { "name": "parent", "type": "str" }, { - "name": "glossary", - "type": "google.cloud.translate_v3.types.Glossary" + "name": "adaptive_mt_dataset", + "type": "google.cloud.translate_v3.types.AdaptiveMtDataset" }, { "name": "retry", @@ -400,22 +569,1503 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_glossary" + "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", + "shortName": "create_adaptive_mt_dataset" + }, + "description": "Sample for CreateAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_adaptive_mt_dataset", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.CreateAdaptiveMtDataset", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "CreateAdaptiveMtDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.CreateAdaptiveMtDatasetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "adaptive_mt_dataset", + "type": "google.cloud.translate_v3.types.AdaptiveMtDataset" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", + "shortName": "create_adaptive_mt_dataset" + }, + "description": "Sample for CreateAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_sync", + "segments": [ + 
{ + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.create_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.translate_v3.types.Glossary" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "translate_v3_generated_translation_service_create_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_create_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.translate_v3.types.Glossary" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "translate_v3_generated_translation_service_create_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_create_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_adaptive_mt_dataset", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtDataset", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DeleteAdaptiveMtDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_adaptive_mt_dataset" + }, + "description": "Sample for DeleteAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": 
"TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_adaptive_mt_dataset", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtDataset", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DeleteAdaptiveMtDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_adaptive_mt_dataset" + }, + "description": "Sample for DeleteAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_adaptive_mt_file", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtFile", + "service": { + "fullName": 
"google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DeleteAdaptiveMtFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_adaptive_mt_file" + }, + "description": "Sample for DeleteAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_adaptive_mt_file", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DeleteAdaptiveMtFile", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DeleteAdaptiveMtFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DeleteAdaptiveMtFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", 
+ "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_adaptive_mt_file" + }, + "description": "Sample for DeleteAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + 
"file": "translate_v3_generated_translation_service_delete_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_delete_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": "translate_v3_generated_translation_service_delete_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, 
+ "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_delete_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.detect_language", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DetectLanguage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DetectLanguageRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "mime_type", + "type": "str" + }, + { + "name": "content", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", + "shortName": "detect_language" + }, + "description": "Sample for DetectLanguage", + "file": "translate_v3_generated_translation_service_detect_language_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, 
+ { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_detect_language_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.detect_language", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "DetectLanguage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.DetectLanguageRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "mime_type", + "type": "str" + }, + { + "name": "content", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", + "shortName": "detect_language" + }, + "description": "Sample for DetectLanguage", + "file": "translate_v3_generated_translation_service_detect_language_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { 
+ "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_detect_language_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_adaptive_mt_dataset", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtDataset", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetAdaptiveMtDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", + "shortName": "get_adaptive_mt_dataset" + }, + "description": "Sample for GetAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtDataset_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_adaptive_mt_dataset", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtDataset", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetAdaptiveMtDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetAdaptiveMtDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtDataset", + "shortName": "get_adaptive_mt_dataset" + }, + "description": "Sample for GetAdaptiveMtDataset", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtDataset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": 
"google.cloud.translate_v3.TranslationServiceAsyncClient.get_adaptive_mt_file", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtFile", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetAdaptiveMtFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetAdaptiveMtFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtFile", + "shortName": "get_adaptive_mt_file" + }, + "description": "Sample for GetAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtFile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_adaptive_mt_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_adaptive_mt_file", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetAdaptiveMtFile", + "service": { + "fullName": 
"google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetAdaptiveMtFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetAdaptiveMtFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.AdaptiveMtFile", + "shortName": "get_adaptive_mt_file" + }, + "description": "Sample for GetAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetAdaptiveMtFile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetGlossaryRequest" + }, + 
{ + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "translate_v3_generated_translation_service_get_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetGlossary_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_glossary", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.Glossary", + "shortName": "get_glossary" + 
}, + "description": "Sample for GetGlossary", + "file": "translate_v3_generated_translation_service_get_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3_generated_TranslationService_GetGlossary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3_generated_translation_service_get_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_supported_languages", + "method": { + "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "service": { + "fullName": "google.cloud.translation.v3.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "GetSupportedLanguages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "display_language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.translate_v3.types.SupportedLanguages", + "shortName": "get_supported_languages" }, - "description": "Sample for CreateGlossary", - "file": 
"translate_v3_generated_translation_service_create_glossary_async.py", + "description": "Sample for GetSupportedLanguages", + "file": "translate_v3_generated_translation_service_get_supported_languages_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_async", + "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_async", "segments": [ { - "end": 59, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 51, "start": 27, "type": "SHORT" }, @@ -425,22 +2075,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_create_glossary_async.py" + "title": "translate_v3_generated_translation_service_get_supported_languages_async.py" }, { "canonical": true, @@ -449,27 +2099,31 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.create_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_supported_languages", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.CreateGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "CreateGlossary" + "shortName": "GetSupportedLanguages" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.CreateGlossaryRequest" + "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" }, { "name": "parent", "type": "str" }, { - "name": "glossary", - "type": 
"google.cloud.translate_v3.types.Glossary" + "name": "model", + "type": "str" + }, + { + "name": "display_language_code", + "type": "str" }, { "name": "retry", @@ -484,22 +2138,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_glossary" + "resultType": "google.cloud.translate_v3.types.SupportedLanguages", + "shortName": "get_supported_languages" }, - "description": "Sample for CreateGlossary", - "file": "translate_v3_generated_translation_service_create_glossary_sync.py", + "description": "Sample for GetSupportedLanguages", + "file": "translate_v3_generated_translation_service_get_supported_languages_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_CreateGlossary_sync", + "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_sync", "segments": [ { - "end": 59, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 51, "start": 27, "type": "SHORT" }, @@ -509,22 +2163,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_create_glossary_sync.py" + "title": "translate_v3_generated_translation_service_get_supported_languages_sync.py" }, { "canonical": true, @@ -534,22 +2188,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.delete_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.import_adaptive_mt_file", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", + "fullName": 
"google.cloud.translation.v3.TranslationService.ImportAdaptiveMtFile", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteGlossary" + "shortName": "ImportAdaptiveMtFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" + "type": "google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -565,22 +2219,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_glossary" + "resultType": "google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse", + "shortName": "import_adaptive_mt_file" }, - "description": "Sample for DeleteGlossary", - "file": "translate_v3_generated_translation_service_delete_glossary_async.py", + "description": "Sample for ImportAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_import_adaptive_mt_file_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_async", + "regionTag": "translate_v3_generated_TranslationService_ImportAdaptiveMtFile_async", "segments": [ { - "end": 55, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 57, "start": 27, "type": "SHORT" }, @@ -590,22 +2244,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_glossary_async.py" + "title": "translate_v3_generated_translation_service_import_adaptive_mt_file_async.py" }, { "canonical": true, @@ -614,22 +2268,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", 
"shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.delete_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.import_adaptive_mt_file", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DeleteGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.ImportAdaptiveMtFile", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DeleteGlossary" + "shortName": "ImportAdaptiveMtFile" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DeleteGlossaryRequest" + "type": "google.cloud.translate_v3.types.ImportAdaptiveMtFileRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -645,22 +2299,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_glossary" + "resultType": "google.cloud.translate_v3.types.ImportAdaptiveMtFileResponse", + "shortName": "import_adaptive_mt_file" }, - "description": "Sample for DeleteGlossary", - "file": "translate_v3_generated_translation_service_delete_glossary_sync.py", + "description": "Sample for ImportAdaptiveMtFile", + "file": "translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DeleteGlossary_sync", + "regionTag": "translate_v3_generated_TranslationService_ImportAdaptiveMtFile_sync", "segments": [ { - "end": 55, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 57, "start": 27, "type": "SHORT" }, @@ -670,22 +2324,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 58, + "start": 55, "type": 
"RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_delete_glossary_sync.py" + "title": "translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py" }, { "canonical": true, @@ -695,36 +2349,24 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.detect_language", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_datasets", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtDatasets", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DetectLanguage" + "shortName": "ListAdaptiveMtDatasets" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DetectLanguageRequest" + "type": "google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "model", - "type": "str" - }, - { - "name": "mime_type", - "type": "str" - }, - { - "name": "content", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -738,14 +2380,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", - "shortName": "detect_language" + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsAsyncPager", + "shortName": "list_adaptive_mt_datasets" }, - "description": "Sample for DetectLanguage", - "file": "translate_v3_generated_translation_service_detect_language_async.py", + "description": "Sample for ListAdaptiveMtDatasets", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"translate_v3_generated_TranslationService_DetectLanguage_async", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_async", "segments": [ { "end": 52, @@ -763,22 +2405,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_detect_language_async.py" + "title": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py" }, { "canonical": true, @@ -787,36 +2429,24 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.detect_language", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_datasets", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.DetectLanguage", + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtDatasets", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "DetectLanguage" + "shortName": "ListAdaptiveMtDatasets" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.DetectLanguageRequest" + "type": "google.cloud.translate_v3.types.ListAdaptiveMtDatasetsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "model", - "type": "str" - }, - { - "name": "mime_type", - "type": "str" - }, - { - "name": "content", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -830,14 +2460,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.DetectLanguageResponse", - "shortName": "detect_language" + "resultType": 
"google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtDatasetsPager", + "shortName": "list_adaptive_mt_datasets" }, - "description": "Sample for DetectLanguage", - "file": "translate_v3_generated_translation_service_detect_language_sync.py", + "description": "Sample for ListAdaptiveMtDatasets", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_DetectLanguage_sync", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_sync", "segments": [ { "end": 52, @@ -855,22 +2485,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_detect_language_sync.py" + "title": "translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py" }, { "canonical": true, @@ -880,22 +2510,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_files", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtFiles", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetGlossary" + "shortName": "ListAdaptiveMtFiles" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetGlossaryRequest" + "type": "google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest" }, 
{ - "name": "name", + "name": "parent", "type": "str" }, { @@ -911,22 +2541,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.Glossary", - "shortName": "get_glossary" + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesAsyncPager", + "shortName": "list_adaptive_mt_files" }, - "description": "Sample for GetGlossary", - "file": "translate_v3_generated_translation_service_get_glossary_async.py", + "description": "Sample for ListAdaptiveMtFiles", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_files_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetGlossary_async", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtFiles_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -946,12 +2576,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_glossary_async.py" + "title": "translate_v3_generated_translation_service_list_adaptive_mt_files_async.py" }, { "canonical": true, @@ -960,22 +2590,22 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": "TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_glossary", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_files", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetGlossary", + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtFiles", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetGlossary" + "shortName": "ListAdaptiveMtFiles" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.translate_v3.types.GetGlossaryRequest" + "type": "google.cloud.translate_v3.types.ListAdaptiveMtFilesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -991,22 +2621,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.Glossary", - "shortName": "get_glossary" + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtFilesPager", + "shortName": "list_adaptive_mt_files" }, - "description": "Sample for GetGlossary", - "file": "translate_v3_generated_translation_service_get_glossary_sync.py", + "description": "Sample for ListAdaptiveMtFiles", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetGlossary_sync", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtFiles_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1026,12 +2656,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_glossary_sync.py" + "title": "translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py" }, { "canonical": true, @@ -1041,32 +2671,24 @@ "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient", "shortName": "TranslationServiceAsyncClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.get_supported_languages", + "fullName": "google.cloud.translate_v3.TranslationServiceAsyncClient.list_adaptive_mt_sentences", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtSentences", "service": { "fullName": 
"google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetSupportedLanguages" + "shortName": "ListAdaptiveMtSentences" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" + "type": "google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest" }, { "name": "parent", "type": "str" }, - { - "name": "model", - "type": "str" - }, - { - "name": "display_language_code", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1080,22 +2702,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.SupportedLanguages", - "shortName": "get_supported_languages" + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesAsyncPager", + "shortName": "list_adaptive_mt_sentences" }, - "description": "Sample for GetSupportedLanguages", - "file": "translate_v3_generated_translation_service_get_supported_languages_async.py", + "description": "Sample for ListAdaptiveMtSentences", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_async", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtSentences_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1115,12 +2737,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_supported_languages_async.py" + "title": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py" }, { "canonical": true, @@ -1129,32 +2751,24 @@ "fullName": "google.cloud.translate_v3.TranslationServiceClient", "shortName": 
"TranslationServiceClient" }, - "fullName": "google.cloud.translate_v3.TranslationServiceClient.get_supported_languages", + "fullName": "google.cloud.translate_v3.TranslationServiceClient.list_adaptive_mt_sentences", "method": { - "fullName": "google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + "fullName": "google.cloud.translation.v3.TranslationService.ListAdaptiveMtSentences", "service": { "fullName": "google.cloud.translation.v3.TranslationService", "shortName": "TranslationService" }, - "shortName": "GetSupportedLanguages" + "shortName": "ListAdaptiveMtSentences" }, "parameters": [ { "name": "request", - "type": "google.cloud.translate_v3.types.GetSupportedLanguagesRequest" + "type": "google.cloud.translate_v3.types.ListAdaptiveMtSentencesRequest" }, { "name": "parent", "type": "str" }, - { - "name": "model", - "type": "str" - }, - { - "name": "display_language_code", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1168,22 +2782,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.translate_v3.types.SupportedLanguages", - "shortName": "get_supported_languages" + "resultType": "google.cloud.translate_v3.services.translation_service.pagers.ListAdaptiveMtSentencesPager", + "shortName": "list_adaptive_mt_sentences" }, - "description": "Sample for GetSupportedLanguages", - "file": "translate_v3_generated_translation_service_get_supported_languages_sync.py", + "description": "Sample for ListAdaptiveMtSentences", + "file": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "translate_v3_generated_TranslationService_GetSupportedLanguages_sync", + "regionTag": "translate_v3_generated_TranslationService_ListAdaptiveMtSentences_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1203,12 +2817,12 @@ "type": 
"REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "translate_v3_generated_translation_service_get_supported_languages_sync.py" + "title": "translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json index 8c78ab1d4002..c7b3f4668902 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-translate", - "version": "3.13.0" + "version": "3.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py new file mode 100644 index 000000000000..9dd123d9ad97 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AdaptiveMtTranslate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_AdaptiveMtTranslate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_adaptive_mt_translate(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + content=['content_value1', 'content_value2'], + ) + + # Make the request + response = await client.adaptive_mt_translate(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_AdaptiveMtTranslate_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py new file mode 100644 index 000000000000..275648c1a429 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py @@ -0,0 +1,54 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AdaptiveMtTranslate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_AdaptiveMtTranslate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_adaptive_mt_translate(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.AdaptiveMtTranslateRequest( + parent="parent_value", + dataset="dataset_value", + content=['content_value1', 'content_value2'], + ) + + # Make the request + response = client.adaptive_mt_translate(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_AdaptiveMtTranslate_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py new file mode 100644 index 000000000000..ff9b59dcc7b0 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateAdaptiveMtDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_create_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() + adaptive_mt_dataset.name = "name_value" + + request = translate_v3.CreateAdaptiveMtDatasetRequest( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt_dataset, + ) + + # Make the request + response = await client.create_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py new file mode 100644 index 000000000000..df803a45de73 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAdaptiveMtDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_create_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + adaptive_mt_dataset = translate_v3.AdaptiveMtDataset() + adaptive_mt_dataset.name = "name_value" + + request = translate_v3.CreateAdaptiveMtDatasetRequest( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt_dataset, + ) + + # Make the request + response = client.create_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py new file mode 100644 index 000000000000..e26d048f79c5 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_dataset_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteAdaptiveMtDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_delete_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + await client.delete_adaptive_mt_dataset(request=request) + + +# [END translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py new file mode 100644 index 000000000000..79e165e0d811 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_dataset_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAdaptiveMtDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_delete_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + client.delete_adaptive_mt_dataset(request=request) + + +# [END translate_v3_generated_TranslationService_DeleteAdaptiveMtDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py new file mode 100644 index 000000000000..af6e726b9fd5 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_file_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAdaptiveMtFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_delete_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + await client.delete_adaptive_mt_file(request=request) + + +# [END translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py new file mode 100644 index 000000000000..e415e826c0d5 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_adaptive_mt_file_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAdaptiveMtFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_delete_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.DeleteAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + client.delete_adaptive_mt_file(request=request) + + +# [END translate_v3_generated_TranslationService_DeleteAdaptiveMtFile_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py new file mode 100644 index 000000000000..fc944dd0ab8d --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAdaptiveMtDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetAdaptiveMtDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_get_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetAdaptiveMtDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py new file mode 100644 index 000000000000..d7ca19ffc845 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAdaptiveMtDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetAdaptiveMtDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_get_adaptive_mt_dataset(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_adaptive_mt_dataset(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetAdaptiveMtDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py new file mode 100644 index 000000000000..d8386c5846c8 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAdaptiveMtFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetAdaptiveMtFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_get_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_adaptive_mt_file(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetAdaptiveMtFile_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py new file mode 100644 index 000000000000..d88fd7525bd6 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAdaptiveMtFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_GetAdaptiveMtFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_get_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.GetAdaptiveMtFileRequest( + name="name_value", + ) + + # Make the request + response = client.get_adaptive_mt_file(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_GetAdaptiveMtFile_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py new file mode 100644 index 000000000000..aa59c25263aa --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportAdaptiveMtFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ImportAdaptiveMtFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_import_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + file_input_source = translate_v3.FileInputSource() + file_input_source.mime_type = "mime_type_value" + file_input_source.content = b'content_blob' + file_input_source.display_name = "display_name_value" + + request = translate_v3.ImportAdaptiveMtFileRequest( + file_input_source=file_input_source, + parent="parent_value", + ) + + # Make the request + response = await client.import_adaptive_mt_file(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_ImportAdaptiveMtFile_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py new file mode 100644 index 000000000000..2ef27f75ad6e --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportAdaptiveMtFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ImportAdaptiveMtFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_import_adaptive_mt_file(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + file_input_source = translate_v3.FileInputSource() + file_input_source.mime_type = "mime_type_value" + file_input_source.content = b'content_blob' + file_input_source.display_name = "display_name_value" + + request = translate_v3.ImportAdaptiveMtFileRequest( + file_input_source=file_input_source, + parent="parent_value", + ) + + # Make the request + response = client.import_adaptive_mt_file(request=request) + + # Handle the response + print(response) + +# [END translate_v3_generated_TranslationService_ImportAdaptiveMtFile_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py new file mode 100644 index 000000000000..08c122ffc40c --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAdaptiveMtDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_adaptive_mt_datasets(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py new file mode 100644 index 000000000000..d2855d9da901 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAdaptiveMtDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_adaptive_mt_datasets(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py new file mode 100644 index 000000000000..7831238bf875 --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAdaptiveMtFiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListAdaptiveMtFiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_adaptive_mt_files(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtFilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_files(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListAdaptiveMtFiles_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py new file mode 100644 index 000000000000..d526f5d91d5e --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAdaptiveMtFiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListAdaptiveMtFiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_adaptive_mt_files(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtFilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_files(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListAdaptiveMtFiles_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py new file mode 100644 index 000000000000..ae959c4e0cdb --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAdaptiveMtSentences +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListAdaptiveMtSentences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +async def sample_list_adaptive_mt_sentences(): + # Create a client + client = translate_v3.TranslationServiceAsyncClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtSentencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_sentences(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListAdaptiveMtSentences_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py new file mode 100644 index 000000000000..223d5c66666b --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAdaptiveMtSentences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3_generated_TranslationService_ListAdaptiveMtSentences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3 + + +def sample_list_adaptive_mt_sentences(): + # Create a client + client = translate_v3.TranslationServiceClient() + + # Initialize request argument(s) + request = translate_v3.ListAdaptiveMtSentencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_adaptive_mt_sentences(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END translate_v3_generated_TranslationService_ListAdaptiveMtSentences_sync] diff --git a/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py b/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py index ef488ce1f7b8..9337f07f0f28 100644 --- a/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py +++ b/packages/google-cloud-translate/scripts/fixup_translate_v3_keywords.py @@ -39,13 +39,23 @@ def partition( class translateCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'adaptive_mt_translate': ('parent', 'dataset', 'content', ), 'batch_translate_document': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'format_conversions', 'customized_attribution', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', ), 'batch_translate_text': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'labels', ), + 'create_adaptive_mt_dataset': ('parent', 'adaptive_mt_dataset', ), 'create_glossary': ('parent', 'glossary', ), + 'delete_adaptive_mt_dataset': ('name', ), + 'delete_adaptive_mt_file': ('name', ), 'delete_glossary': ('name', ), 'detect_language': ('parent', 'model', 
'content', 'mime_type', 'labels', ), + 'get_adaptive_mt_dataset': ('name', ), + 'get_adaptive_mt_file': ('name', ), 'get_glossary': ('name', ), 'get_supported_languages': ('parent', 'display_language_code', 'model', ), + 'import_adaptive_mt_file': ('parent', 'file_input_source', 'gcs_input_source', ), + 'list_adaptive_mt_datasets': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_adaptive_mt_files': ('parent', 'page_size', 'page_token', ), + 'list_adaptive_mt_sentences': ('parent', 'page_size', 'page_token', ), 'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', ), 'translate_document': ('parent', 'target_language_code', 'document_input_config', 'source_language_code', 'document_output_config', 'model', 'glossary_config', 'labels', 'customized_attribution', 'is_translate_native_pdf_only', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', ), 'translate_text': ('contents', 'target_language_code', 'parent', 'mime_type', 'source_language_code', 'model', 'glossary_config', 'labels', ), diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py index 1e00bc3dd584..5c3a83581afa 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py @@ -26,14 +26,6 @@ import json import math -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers -from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response -from requests.sessions import Session - from google.api_core import ( future, gapic_v1, @@ -49,17 +41,25 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.longrunning 
import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + from google.cloud.translate_v3.services.translation_service import ( TranslationServiceAsyncClient, TranslationServiceClient, pagers, transports, ) -from google.cloud.translate_v3.types import translation_service -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import json_format -from google.protobuf import timestamp_pb2 # type: ignore +from google.cloud.translate_v3.types import adaptive_mt, common, translation_service def client_cert_source_callback(): @@ -3320,191 +3320,6231 @@ async def test_delete_glossary_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - translation_service.TranslateTextRequest, + adaptive_mt.CreateAdaptiveMtDatasetRequest, dict, ], ) -def test_translate_text_rest(request_type): +def test_create_adaptive_mt_dataset(request_type, transport: str = "grpc"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = translation_service.TranslateTextResponse() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.TranslateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + response = client.create_adaptive_mt_dataset(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.translate_text(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, translation_service.TranslateTextResponse) + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 -def test_translate_text_rest_required_fields( - request_type=translation_service.TranslateTextRequest, -): - transport_class = transports.TranslationServiceRestTransport +def test_create_adaptive_mt_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - request_init = {} - request_init["contents"] = "" - request_init["target_language_code"] = "" - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + client.create_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() + + +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).translate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + ) + response = await client.create_adaptive_mt_dataset(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.CreateAdaptiveMtDatasetRequest() - jsonified_request["contents"] = "contents_value" - jsonified_request["targetLanguageCode"] = "target_language_code_value" - jsonified_request["parent"] = "parent_value" + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).translate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "contents" in jsonified_request - assert jsonified_request["contents"] == "contents_value" - assert "targetLanguageCode" in jsonified_request - assert jsonified_request["targetLanguageCode"] == "target_language_code_value" - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_async_from_dict(): + await test_create_adaptive_mt_dataset_async(request_type=dict) + +def test_create_adaptive_mt_dataset_field_headers(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateTextResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() - # Convert return value to protobuf type - return_value = translation_service.TranslateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtDataset() + client.create_adaptive_mt_dataset(request) - response = client.translate_text(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_translate_text_rest_unset_required_fields(): - transport = transports.TranslationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.translate_text._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "contents", - "targetLanguageCode", - "parent", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() ) - ) + await client.create_adaptive_mt_dataset(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_translate_text_rest_interceptors(null_interceptor): - transport = transports.TranslationServiceRestTransport( + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_adaptive_mt_dataset_flattened(): + client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TranslationServiceRestInterceptor(), ) - client = TranslationServiceClient(transport=transport) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_translate_text" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_translate_text" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.TranslateTextRequest.pb( - translation_service.TranslateTextRequest() + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_adaptive_mt_dataset( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].adaptive_mt_dataset + mock_val = adaptive_mt.AdaptiveMtDataset(name="name_value") + assert arg == mock_val + + +def test_create_adaptive_mt_dataset_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_adaptive_mt_dataset( + adaptive_mt.CreateAdaptiveMtDatasetRequest(), + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_adaptive_mt_dataset( + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].adaptive_mt_dataset + mock_val = adaptive_mt.AdaptiveMtDataset(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_adaptive_mt_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_adaptive_mt_dataset( + adaptive_mt.CreateAdaptiveMtDatasetRequest(), + parent="parent_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.DeleteAdaptiveMtDatasetRequest, + dict, + ], +) +def test_delete_adaptive_mt_dataset(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_adaptive_mt_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + client.delete_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_async_from_dict(): + await test_delete_adaptive_mt_dataset_async(request_type=dict) + + +def test_delete_adaptive_mt_dataset_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = None + client.delete_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_adaptive_mt_dataset_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_adaptive_mt_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_adaptive_mt_dataset_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_adaptive_mt_dataset( + adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_adaptive_mt_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_adaptive_mt_dataset( + adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.GetAdaptiveMtDatasetRequest, + dict, + ], +) +def test_get_adaptive_mt_dataset(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + response = client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + + +def test_get_adaptive_mt_dataset_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + client.get_adaptive_mt_dataset() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.GetAdaptiveMtDatasetRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + ) + response = await client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_async_from_dict(): + await test_get_adaptive_mt_dataset_async(request_type=dict) + + +def test_get_adaptive_mt_dataset_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtDataset() + client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtDatasetRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() + ) + await client.get_adaptive_mt_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_adaptive_mt_dataset_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.AdaptiveMtDataset() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_adaptive_mt_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_adaptive_mt_dataset_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_adaptive_mt_dataset( + adaptive_mt.GetAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtDataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtDataset() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_adaptive_mt_dataset( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_dataset_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_adaptive_mt_dataset( + adaptive_mt.GetAdaptiveMtDatasetRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtDatasetsRequest, + dict, + ], +) +def test_list_adaptive_mt_datasets(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_adaptive_mt_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtDatasetsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_adaptive_mt_datasets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + client.list_adaptive_mt_datasets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtDatasetsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAdaptiveMtDatasetsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_from_dict(): + await test_list_adaptive_mt_datasets_async(request_type=dict) + + +def test_list_adaptive_mt_datasets_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + client.list_adaptive_mt_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse() + ) + await client.list_adaptive_mt_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_adaptive_mt_datasets_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_adaptive_mt_datasets( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_adaptive_mt_datasets_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_adaptive_mt_datasets( + adaptive_mt.ListAdaptiveMtDatasetsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtDatasetsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_adaptive_mt_datasets( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_adaptive_mt_datasets( + adaptive_mt.ListAdaptiveMtDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_datasets_pager(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_adaptive_mt_datasets(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in results) + + +def test_list_adaptive_mt_datasets_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + RuntimeError, + ) + pages = list(client.list_adaptive_mt_datasets(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_adaptive_mt_datasets( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in responses) + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_datasets_async_pages(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_datasets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_adaptive_mt_datasets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.AdaptiveMtTranslateRequest, + dict, + ], +) +def test_adaptive_mt_translate(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) + response = client.adaptive_mt_translate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) + assert response.language_code == "language_code_value" + + +def test_adaptive_mt_translate_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + client.adaptive_mt_translate() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_async( + transport: str = "grpc_asyncio", request_type=adaptive_mt.AdaptiveMtTranslateRequest +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) + ) + response = await client.adaptive_mt_translate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) + assert response.language_code == "language_code_value" + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_async_from_dict(): + await test_adaptive_mt_translate_async(request_type=dict) + + +def test_adaptive_mt_translate_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.AdaptiveMtTranslateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + client.adaptive_mt_translate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = adaptive_mt.AdaptiveMtTranslateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse() + ) + await client.adaptive_mt_translate(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_adaptive_mt_translate_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.adaptive_mt_translate( + parent="parent_value", + content=["content_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].content + mock_val = ["content_value"] + assert arg == mock_val + + +def test_adaptive_mt_translate_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.adaptive_mt_translate( + adaptive_mt.AdaptiveMtTranslateRequest(), + parent="parent_value", + content=["content_value"], + ) + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.adaptive_mt_translate), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtTranslateResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.adaptive_mt_translate( + parent="parent_value", + content=["content_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].content + mock_val = ["content_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_adaptive_mt_translate_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.adaptive_mt_translate( + adaptive_mt.AdaptiveMtTranslateRequest(), + parent="parent_value", + content=["content_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.GetAdaptiveMtFileRequest, + dict, + ], +) +def test_get_adaptive_mt_file(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, + ) + response = client.get_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.entry_count == 1210 + + +def test_get_adaptive_mt_file_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + client.get_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_async( + transport: str = "grpc_asyncio", request_type=adaptive_mt.GetAdaptiveMtFileRequest +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, + ) + ) + response = await client.get_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.GetAdaptiveMtFileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.entry_count == 1210 + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_async_from_dict(): + await test_get_adaptive_mt_file_async(request_type=dict) + + +def test_get_adaptive_mt_file_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + call.return_value = adaptive_mt.AdaptiveMtFile() + client.get_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.GetAdaptiveMtFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile() + ) + await client.get_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_adaptive_mt_file_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtFile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_adaptive_mt_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_adaptive_mt_file_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_adaptive_mt_file( + adaptive_mt.GetAdaptiveMtFileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.AdaptiveMtFile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.AdaptiveMtFile() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_adaptive_mt_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_adaptive_mt_file_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_adaptive_mt_file( + adaptive_mt.GetAdaptiveMtFileRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.DeleteAdaptiveMtFileRequest, + dict, + ], +) +def test_delete_adaptive_mt_file(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_adaptive_mt_file_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + client.delete_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.DeleteAdaptiveMtFileRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.DeleteAdaptiveMtFileRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_async_from_dict(): + await test_delete_adaptive_mt_file_async(request_type=dict) + + +def test_delete_adaptive_mt_file_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = adaptive_mt.DeleteAdaptiveMtFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + call.return_value = None + client.delete_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.DeleteAdaptiveMtFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_adaptive_mt_file_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_adaptive_mt_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_adaptive_mt_file_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_adaptive_mt_file( + adaptive_mt.DeleteAdaptiveMtFileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_adaptive_mt_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_adaptive_mt_file_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_adaptive_mt_file( + adaptive_mt.DeleteAdaptiveMtFileRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ImportAdaptiveMtFileRequest, + dict, + ], +) +def test_import_adaptive_mt_file(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + response = client.import_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) + + +def test_import_adaptive_mt_file_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + client.import_adaptive_mt_file() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.ImportAdaptiveMtFileRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() + ) + response = await client.import_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ImportAdaptiveMtFileRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_async_from_dict(): + await test_import_adaptive_mt_file_async(request_type=dict) + + +def test_import_adaptive_mt_file_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ImportAdaptiveMtFileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + client.import_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ImportAdaptiveMtFileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() + ) + await client.import_adaptive_mt_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_import_adaptive_mt_file_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_adaptive_mt_file( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_import_adaptive_mt_file_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_adaptive_mt_file( + adaptive_mt.ImportAdaptiveMtFileRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_adaptive_mt_file), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ImportAdaptiveMtFileResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.import_adaptive_mt_file( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_import_adaptive_mt_file_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_adaptive_mt_file( + adaptive_mt.ImportAdaptiveMtFileRequest(), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtFilesRequest, + dict, + ], +) +def test_list_adaptive_mt_files(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_adaptive_mt_files(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtFilesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_adaptive_mt_files_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + client.list_adaptive_mt_files() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async( + transport: str = "grpc_asyncio", request_type=adaptive_mt.ListAdaptiveMtFilesRequest +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_files(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtFilesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtFilesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_from_dict(): + await test_list_adaptive_mt_files_async(request_type=dict) + + +def test_list_adaptive_mt_files_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtFilesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + client.list_adaptive_mt_files(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtFilesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse() + ) + await client.list_adaptive_mt_files(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_adaptive_mt_files_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_adaptive_mt_files( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_adaptive_mt_files_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_adaptive_mt_files( + adaptive_mt.ListAdaptiveMtFilesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtFilesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtFilesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_adaptive_mt_files( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_adaptive_mt_files( + adaptive_mt.ListAdaptiveMtFilesRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_files_pager(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_adaptive_mt_files(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in results) + + +def test_list_adaptive_mt_files_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_adaptive_mt_files(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_adaptive_mt_files( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in responses) + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_files_async_pages(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_files), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_adaptive_mt_files(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.ListAdaptiveMtSentencesRequest, + dict, + ], +) +def test_list_adaptive_mt_sentences(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_adaptive_mt_sentences(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtSentencesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_adaptive_mt_sentences_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + client.list_adaptive_mt_sentences() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async( + transport: str = "grpc_asyncio", + request_type=adaptive_mt.ListAdaptiveMtSentencesRequest, +): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_adaptive_mt_sentences(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == adaptive_mt.ListAdaptiveMtSentencesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAdaptiveMtSentencesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_from_dict(): + await test_list_adaptive_mt_sentences_async(request_type=dict) + + +def test_list_adaptive_mt_sentences_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtSentencesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + client.list_adaptive_mt_sentences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = adaptive_mt.ListAdaptiveMtSentencesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse() + ) + await client.list_adaptive_mt_sentences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_adaptive_mt_sentences_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_adaptive_mt_sentences( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_adaptive_mt_sentences_flattened_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_adaptive_mt_sentences( + adaptive_mt.ListAdaptiveMtSentencesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_flattened_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + adaptive_mt.ListAdaptiveMtSentencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_adaptive_mt_sentences( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_flattened_error_async(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_adaptive_mt_sentences( + adaptive_mt.ListAdaptiveMtSentencesRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_sentences_pager(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_adaptive_mt_sentences(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in results) + + +def test_list_adaptive_mt_sentences_pages(transport_name: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, + ) + pages = list(client.list_adaptive_mt_sentences(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_pager(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_adaptive_mt_sentences( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in responses) + + +@pytest.mark.asyncio +async def test_list_adaptive_mt_sentences_async_pages(): + client = TranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_adaptive_mt_sentences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_adaptive_mt_sentences(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.TranslateTextRequest, + dict, + ], +) +def test_translate_text_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.TranslateTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.TranslateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.translate_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.TranslateTextResponse) + + +def test_translate_text_rest_required_fields( + request_type=translation_service.TranslateTextRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["contents"] = "" + request_init["target_language_code"] = "" + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["contents"] = "contents_value" + jsonified_request["targetLanguageCode"] = "target_language_code_value" + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "contents" in jsonified_request + assert jsonified_request["contents"] 
== "contents_value" + assert "targetLanguageCode" in jsonified_request + assert jsonified_request["targetLanguageCode"] == "target_language_code_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.TranslateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.TranslateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.translate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_translate_text_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.translate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "contents", + "targetLanguageCode", + "parent", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_translate_text_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_translate_text" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_translate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.TranslateTextRequest.pb( + 
translation_service.TranslateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.TranslateTextResponse.to_json( + translation_service.TranslateTextResponse() + ) + + request = translation_service.TranslateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.TranslateTextResponse() + + client.translate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_translate_text_rest_bad_request( + transport: str = "rest", request_type=translation_service.TranslateTextRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.translate_text(request) + + +def test_translate_text_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.TranslateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + target_language_code="target_language_code_value", + contents=["contents_value"], + model="model_value", + mime_type="mime_type_value", + source_language_code="source_language_code_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.TranslateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.translate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:translateText" + % client.transport._host, + args[1], + ) + + +def test_translate_text_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.translate_text( + translation_service.TranslateTextRequest(), + parent="parent_value", + target_language_code="target_language_code_value", + contents=["contents_value"], + model="model_value", + mime_type="mime_type_value", + source_language_code="source_language_code_value", + ) + + +def test_translate_text_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.DetectLanguageRequest, + dict, + ], +) +def test_detect_language_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.DetectLanguageResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.DetectLanguageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detect_language(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, translation_service.DetectLanguageResponse) + + +def test_detect_language_rest_required_fields( + request_type=translation_service.DetectLanguageRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detect_language._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detect_language._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.DetectLanguageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.DetectLanguageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.detect_language(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_detect_language_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.detect_language._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detect_language_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_detect_language" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, 
"pre_detect_language" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.DetectLanguageRequest.pb( + translation_service.DetectLanguageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.DetectLanguageResponse.to_json( + translation_service.DetectLanguageResponse() + ) + + request = translation_service.DetectLanguageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.DetectLanguageResponse() + + client.detect_language( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detect_language_rest_bad_request( + transport: str = "rest", request_type=translation_service.DetectLanguageRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detect_language(request) + + +def test_detect_language_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.DetectLanguageResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + model="model_value", + mime_type="mime_type_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.DetectLanguageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.detect_language(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:detectLanguage" + % client.transport._host, + args[1], + ) + + +def test_detect_language_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detect_language( + translation_service.DetectLanguageRequest(), + parent="parent_value", + model="model_value", + mime_type="mime_type_value", + content="content_value", + ) + + +def test_detect_language_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.GetSupportedLanguagesRequest, + dict, + ], +) +def test_get_supported_languages_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.SupportedLanguages() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.SupportedLanguages.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_supported_languages(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.SupportedLanguages) + + +def test_get_supported_languages_rest_required_fields( + request_type=translation_service.GetSupportedLanguagesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_supported_languages._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_supported_languages._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "display_language_code", + "model", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.SupportedLanguages() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.SupportedLanguages.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_supported_languages(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_supported_languages_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_supported_languages._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "displayLanguageCode", + "model", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_supported_languages_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_supported_languages" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_supported_languages" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
translation_service.GetSupportedLanguagesRequest.pb( + translation_service.GetSupportedLanguagesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.SupportedLanguages.to_json( + translation_service.SupportedLanguages() + ) + + request = translation_service.GetSupportedLanguagesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.SupportedLanguages() + + client.get_supported_languages( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_supported_languages_rest_bad_request( + transport: str = "rest", + request_type=translation_service.GetSupportedLanguagesRequest, +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_supported_languages(request) + + +def test_get_supported_languages_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.SupportedLanguages() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + model="model_value", + display_language_code="display_language_code_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.SupportedLanguages.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_supported_languages(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/supportedLanguages" + % client.transport._host, + args[1], + ) + + +def test_get_supported_languages_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_supported_languages( + translation_service.GetSupportedLanguagesRequest(), + parent="parent_value", + model="model_value", + display_language_code="display_language_code_value", + ) + + +def test_get_supported_languages_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.TranslateDocumentRequest, + dict, + ], +) +def test_translate_document_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.TranslateDocumentResponse( + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.TranslateDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.translate_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, translation_service.TranslateDocumentResponse) + assert response.model == "model_value" + + +def test_translate_document_rest_required_fields( + request_type=translation_service.TranslateDocumentRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["target_language_code"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).translate_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["targetLanguageCode"] = "target_language_code_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).translate_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "targetLanguageCode" in jsonified_request + assert jsonified_request["targetLanguageCode"] == "target_language_code_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.TranslateDocumentResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.TranslateDocumentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.translate_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_translate_document_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.translate_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "targetLanguageCode", + "documentInputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_translate_document_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_translate_document" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_translate_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.TranslateDocumentRequest.pb( + translation_service.TranslateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + translation_service.TranslateDocumentResponse.to_json( + translation_service.TranslateDocumentResponse() + ) + ) + + request = translation_service.TranslateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.TranslateDocumentResponse() + + client.translate_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_translate_document_rest_bad_request( + transport: str = "rest", request_type=translation_service.TranslateDocumentRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.translate_document(request) + + +def test_translate_document_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.BatchTranslateTextRequest, + dict, + ], +) +def test_batch_translate_text_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_translate_text(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_batch_translate_text_rest_required_fields( + request_type=translation_service.BatchTranslateTextRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["source_language_code"] = "" + request_init["target_language_codes"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["sourceLanguageCode"] = "source_language_code_value" + jsonified_request["targetLanguageCodes"] = "target_language_codes_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sourceLanguageCode" in jsonified_request + assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" + assert "targetLanguageCodes" in jsonified_request + assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_translate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_translate_text_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_translate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "sourceLanguageCode", + "targetLanguageCodes", + "inputConfigs", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_translate_text_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = 
TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_batch_translate_text" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_batch_translate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.BatchTranslateTextRequest.pb( + translation_service.BatchTranslateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.BatchTranslateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_translate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_translate_text_rest_bad_request( + transport: str = "rest", request_type=translation_service.BatchTranslateTextRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_translate_text(request) + + +def test_batch_translate_text_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.BatchTranslateDocumentRequest, + dict, + ], +) +def test_batch_translate_document_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_translate_document(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_batch_translate_document_rest_required_fields( + request_type=translation_service.BatchTranslateDocumentRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["source_language_code"] = "" + request_init["target_language_codes"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["sourceLanguageCode"] = "source_language_code_value" + jsonified_request["targetLanguageCodes"] = "target_language_codes_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_translate_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sourceLanguageCode" in jsonified_request + assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" + assert "targetLanguageCodes" in jsonified_request + assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_translate_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_translate_document_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_translate_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "sourceLanguageCode", + "targetLanguageCodes", + "inputConfigs", + "outputConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_translate_document_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client 
= TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_batch_translate_document" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_batch_translate_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.BatchTranslateDocumentRequest.pb( + translation_service.BatchTranslateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.BatchTranslateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_translate_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_translate_document_rest_bad_request( + transport: str = "rest", + request_type=translation_service.BatchTranslateDocumentRequest, +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_translate_document(request) + + +def test_batch_translate_document_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_translate_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:batchTranslateDocument" + % client.transport._host, + args[1], + ) + + +def test_batch_translate_document_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_translate_document( + translation_service.BatchTranslateDocumentRequest(), + parent="parent_value", + source_language_code="source_language_code_value", + target_language_codes=["target_language_codes_value"], + input_configs=[ + translation_service.BatchDocumentInputConfig( + gcs_source=translation_service.GcsSource( + input_uri="input_uri_value" + ) + ) + ], + output_config=translation_service.BatchDocumentOutputConfig( + gcs_destination=translation_service.GcsDestination( + output_uri_prefix="output_uri_prefix_value" + ) + ), + ) + + +def test_batch_translate_document_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.CreateGlossaryRequest, + dict, + ], +) +def test_create_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["glossary"] = { + "name": "name_value", + "language_pair": { + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + }, + "language_codes_set": { + "language_codes": ["language_codes_value1", "language_codes_value2"] + }, + "input_config": {"gcs_source": {"input_uri": 
"input_uri_value"}}, + "entry_count": 1210, + "submit_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "display_name": "display_name_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = translation_service.CreateGlossaryRequest.meta.fields["glossary"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if 
(field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_glossary(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_glossary_rest_required_fields( + request_type=translation_service.CreateGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_glossary._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "glossary", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_glossary" + ) as 
pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.CreateGlossaryRequest.pb( + translation_service.CreateGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.CreateGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.CreateGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_glossary(request) + + +def test_create_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) + + +def test_create_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary( + translation_service.CreateGlossaryRequest(), + parent="parent_value", + glossary=translation_service.Glossary(name="name_value"), + ) + + +def test_create_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.ListGlossariesRequest, + dict, + ], +) +def test_list_glossaries_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.ListGlossariesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_glossaries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_glossaries_rest_required_fields( + request_type=translation_service.ListGlossariesRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.ListGlossariesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_glossaries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_glossaries_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_glossaries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossaries_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_list_glossaries" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_list_glossaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.ListGlossariesRequest.pb( + translation_service.ListGlossariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.ListGlossariesResponse.to_json( + translation_service.ListGlossariesResponse() + ) + + request = translation_service.ListGlossariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.ListGlossariesResponse() + + client.list_glossaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_glossaries_rest_bad_request( + transport: str = "rest", request_type=translation_service.ListGlossariesRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_glossaries(request) + + +def test_list_glossaries_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.ListGlossariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_glossaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) + + +def test_list_glossaries_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossaries( + translation_service.ListGlossariesRequest(), + parent="parent_value", + ) + + +def test_list_glossaries_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + translation_service.Glossary(), + ], + next_page_token="abc", + ), + translation_service.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + ], + next_page_token="ghi", + ), + translation_service.ListGlossariesResponse( + glossaries=[ + translation_service.Glossary(), + translation_service.Glossary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + translation_service.ListGlossariesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_glossaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, translation_service.Glossary) for i in results) + + pages = list(client.list_glossaries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.GetGlossaryRequest, + dict, + ], +) +def test_get_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.Glossary( + name="name_value", + entry_count=1210, + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_glossary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.Glossary) + assert response.name == "name_value" + assert response.entry_count == 1210 + assert response.display_name == "display_name_value" + + +def test_get_glossary_rest_required_fields( + request_type=translation_service.GetGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.Glossary() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_get_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_get_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.GetGlossaryRequest.pb( + translation_service.GetGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = translation_service.Glossary.to_json( + translation_service.Glossary() + ) + + request = translation_service.GetGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.Glossary() + + client.get_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.GetGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_glossary(request) + + +def test_get_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = translation_service.Glossary() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = translation_service.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) + + +def test_get_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary( + translation_service.GetGlossaryRequest(), + name="name_value", + ) + + +def test_get_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.DeleteGlossaryRequest, + dict, + ], +) +def test_delete_glossary_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_glossary(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_glossary_rest_required_fields( + request_type=translation_service.DeleteGlossaryRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_glossary_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_delete_glossary" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_delete_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = translation_service.DeleteGlossaryRequest.pb( + translation_service.DeleteGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = translation_service.DeleteGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_glossary_rest_bad_request( + transport: str = "rest", request_type=translation_service.DeleteGlossaryRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_glossary(request) + + +def test_delete_glossary_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) + + +def test_delete_glossary_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_glossary( + translation_service.DeleteGlossaryRequest(), + name="name_value", + ) + + +def test_delete_glossary_rest_error(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + adaptive_mt.CreateAdaptiveMtDatasetRequest, + dict, + ], +) +def test_create_adaptive_mt_dataset_rest(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["adaptive_mt_dataset"] = { + "name": "name_value", + "display_name": "display_name_value", + "source_language_code": "source_language_code_value", + "target_language_code": "target_language_code_value", + "example_count": 1396, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = adaptive_mt.CreateAdaptiveMtDatasetRequest.meta.fields[ + "adaptive_mt_dataset" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["adaptive_mt_dataset"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["adaptive_mt_dataset"][field])): + del 
request_init["adaptive_mt_dataset"][field][i][subfield] + else: + del request_init["adaptive_mt_dataset"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_adaptive_mt_dataset(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 + + +def test_create_adaptive_mt_dataset_rest_required_fields( + request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtDataset() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_adaptive_mt_dataset(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_adaptive_mt_dataset_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_adaptive_mt_dataset._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "adaptiveMtDataset", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_adaptive_mt_dataset_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_create_adaptive_mt_dataset" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_create_adaptive_mt_dataset" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = adaptive_mt.CreateAdaptiveMtDatasetRequest.pb( + adaptive_mt.CreateAdaptiveMtDatasetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.TranslateTextResponse.to_json( - translation_service.TranslateTextResponse() + req.return_value._content = adaptive_mt.AdaptiveMtDataset.to_json( + adaptive_mt.AdaptiveMtDataset() ) - request = translation_service.TranslateTextRequest() + request = adaptive_mt.CreateAdaptiveMtDatasetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = translation_service.TranslateTextResponse() + post.return_value = adaptive_mt.AdaptiveMtDataset() - client.translate_text( + client.create_adaptive_mt_dataset( request, metadata=[ ("key", "val"), @@ -3516,8 +9556,8 @@ def test_translate_text_rest_interceptors(null_interceptor): post.assert_called_once() -def test_translate_text_rest_bad_request( - transport: str = "rest", request_type=translation_service.TranslateTextRequest +def test_create_adaptive_mt_dataset_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.CreateAdaptiveMtDatasetRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3537,10 +9577,10 @@ def test_translate_text_rest_bad_request( response_value.status_code = 400 response_value.request = Request() 
req.return_value = response_value - client.translate_text(request) + client.create_adaptive_mt_dataset(request) -def test_translate_text_rest_flattened(): +def test_create_adaptive_mt_dataset_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3549,7 +9589,7 @@ def test_translate_text_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateTextResponse() + return_value = adaptive_mt.AdaptiveMtDataset() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -3557,11 +9597,7 @@ def test_translate_text_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - target_language_code="target_language_code_value", - contents=["contents_value"], - model="model_value", - mime_type="mime_type_value", - source_language_code="source_language_code_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) mock_args.update(sample_request) @@ -3569,25 +9605,25 @@ def test_translate_text_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.TranslateTextResponse.pb(return_value) + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.translate_text(**mock_args) + client.create_adaptive_mt_dataset(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:translateText" + "%s/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets" % client.transport._host, args[1], ) -def test_translate_text_rest_flattened_error(transport: str = "rest"): +def test_create_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3596,18 +9632,14 @@ def test_translate_text_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.translate_text( - translation_service.TranslateTextRequest(), + client.create_adaptive_mt_dataset( + adaptive_mt.CreateAdaptiveMtDatasetRequest(), parent="parent_value", - target_language_code="target_language_code_value", - contents=["contents_value"], - model="model_value", - mime_type="mime_type_value", - source_language_code="source_language_code_value", + adaptive_mt_dataset=adaptive_mt.AdaptiveMtDataset(name="name_value"), ) -def test_translate_text_rest_error(): +def test_create_adaptive_mt_dataset_rest_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3616,47 +9648,47 @@ def test_translate_text_rest_error(): @pytest.mark.parametrize( "request_type", [ - translation_service.DetectLanguageRequest, + adaptive_mt.DeleteAdaptiveMtDatasetRequest, dict, ], ) -def test_detect_language_rest(request_type): +def test_delete_adaptive_mt_dataset_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = translation_service.DetectLanguageResponse() + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.DetectLanguageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.detect_language(request) + response = client.delete_adaptive_mt_dataset(request) # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.DetectLanguageResponse) + assert response is None -def test_detect_language_rest_required_fields( - request_type=translation_service.DetectLanguageRequest, +def test_delete_adaptive_mt_dataset_rest_required_fields( + request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3671,21 +9703,21 @@ def test_detect_language_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).detect_language._get_unset_required_fields(jsonified_request) + ).delete_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).detect_language._get_unset_required_fields(jsonified_request) + ).delete_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3694,7 +9726,7 @@ def test_detect_language_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = translation_service.DetectLanguageResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3706,40 +9738,36 @@ def test_detect_language_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = translation_service.DetectLanguageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.detect_language(request) + response = client.delete_adaptive_mt_dataset(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_detect_language_rest_unset_required_fields(): +def 
test_delete_adaptive_mt_dataset_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.detect_language._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.delete_adaptive_mt_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_detect_language_rest_interceptors(null_interceptor): +def test_delete_adaptive_mt_dataset_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3752,14 +9780,11 @@ def test_detect_language_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_detect_language" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_detect_language" + transports.TranslationServiceRestInterceptor, "pre_delete_adaptive_mt_dataset" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.DetectLanguageRequest.pb( - translation_service.DetectLanguageRequest() + pb_message = adaptive_mt.DeleteAdaptiveMtDatasetRequest.pb( + adaptive_mt.DeleteAdaptiveMtDatasetRequest() ) transcode.return_value = { "method": "post", @@ -3771,19 +9796,15 @@ def test_detect_language_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.DetectLanguageResponse.to_json( - translation_service.DetectLanguageResponse() - ) - request = translation_service.DetectLanguageRequest() + request = adaptive_mt.DeleteAdaptiveMtDatasetRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = translation_service.DetectLanguageResponse() - client.detect_language( + client.delete_adaptive_mt_dataset( request, metadata=[ ("key", "val"), @@ -3792,11 +9813,10 @@ def test_detect_language_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_detect_language_rest_bad_request( - transport: str = "rest", request_type=translation_service.DetectLanguageRequest +def test_delete_adaptive_mt_dataset_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.DeleteAdaptiveMtDatasetRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3804,7 +9824,9 @@ def test_detect_language_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3816,10 +9838,10 @@ def test_detect_language_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.detect_language(request) + client.delete_adaptive_mt_dataset(request) -def test_detect_language_rest_flattened(): +def test_delete_adaptive_mt_dataset_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3828,42 +9850,40 @@ def test_detect_language_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = translation_service.DetectLanguageResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - model="model_value", - mime_type="mime_type_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = translation_service.DetectLanguageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.detect_language(**mock_args) + client.delete_adaptive_mt_dataset(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:detectLanguage" + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}" % client.transport._host, args[1], ) -def test_detect_language_rest_flattened_error(transport: str = "rest"): +def test_delete_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3872,16 +9892,13 @@ def test_detect_language_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.detect_language( - translation_service.DetectLanguageRequest(), - parent="parent_value", - model="model_value", - mime_type="mime_type_value", - content="content_value", + client.delete_adaptive_mt_dataset( + adaptive_mt.DeleteAdaptiveMtDatasetRequest(), + name="name_value", ) -def test_detect_language_rest_error(): +def test_delete_adaptive_mt_dataset_rest_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3890,47 +9907,60 @@ def test_detect_language_rest_error(): @pytest.mark.parametrize( "request_type", [ - translation_service.GetSupportedLanguagesRequest, + adaptive_mt.GetAdaptiveMtDatasetRequest, dict, ], ) -def test_get_supported_languages_rest(request_type): +def test_get_adaptive_mt_dataset_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = translation_service.SupportedLanguages() + return_value = adaptive_mt.AdaptiveMtDataset( + name="name_value", + display_name="display_name_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + example_count=1396, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.SupportedLanguages.pb(return_value) + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_supported_languages(request) + response = client.get_adaptive_mt_dataset(request) # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.SupportedLanguages) + assert isinstance(response, adaptive_mt.AdaptiveMtDataset) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_language_code == "source_language_code_value" + assert response.target_language_code == "target_language_code_value" + assert response.example_count == 1396 -def test_get_supported_languages_rest_required_fields( - request_type=translation_service.GetSupportedLanguagesRequest, +def test_get_adaptive_mt_dataset_rest_required_fields( + request_type=adaptive_mt.GetAdaptiveMtDatasetRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3945,28 +9975,21 @@ def test_get_supported_languages_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).get_supported_languages._get_unset_required_fields(jsonified_request) + ).get_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_supported_languages._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "display_language_code", - "model", - ) - ) + ).get_adaptive_mt_dataset._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3975,7 +9998,7 @@ def test_get_supported_languages_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = translation_service.SupportedLanguages() + return_value = adaptive_mt.AdaptiveMtDataset() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3996,38 +10019,30 @@ def test_get_supported_languages_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.SupportedLanguages.pb(return_value) + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_supported_languages(request) + response = client.get_adaptive_mt_dataset(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_supported_languages_rest_unset_required_fields(): +def test_get_adaptive_mt_dataset_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_supported_languages._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "displayLanguageCode", - "model", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_adaptive_mt_dataset._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_supported_languages_rest_interceptors(null_interceptor): +def test_get_adaptive_mt_dataset_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4040,14 +10055,14 @@ def test_get_supported_languages_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_get_supported_languages" + 
transports.TranslationServiceRestInterceptor, "post_get_adaptive_mt_dataset" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_get_supported_languages" + transports.TranslationServiceRestInterceptor, "pre_get_adaptive_mt_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.GetSupportedLanguagesRequest.pb( - translation_service.GetSupportedLanguagesRequest() + pb_message = adaptive_mt.GetAdaptiveMtDatasetRequest.pb( + adaptive_mt.GetAdaptiveMtDatasetRequest() ) transcode.return_value = { "method": "post", @@ -4059,19 +10074,19 @@ def test_get_supported_languages_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.SupportedLanguages.to_json( - translation_service.SupportedLanguages() + req.return_value._content = adaptive_mt.AdaptiveMtDataset.to_json( + adaptive_mt.AdaptiveMtDataset() ) - request = translation_service.GetSupportedLanguagesRequest() + request = adaptive_mt.GetAdaptiveMtDatasetRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = translation_service.SupportedLanguages() + post.return_value = adaptive_mt.AdaptiveMtDataset() - client.get_supported_languages( + client.get_adaptive_mt_dataset( request, metadata=[ ("key", "val"), @@ -4083,9 +10098,8 @@ def test_get_supported_languages_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_supported_languages_rest_bad_request( - transport: str = "rest", - request_type=translation_service.GetSupportedLanguagesRequest, +def test_get_adaptive_mt_dataset_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.GetAdaptiveMtDatasetRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4093,7 +10107,9 @@ def 
test_get_supported_languages_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4105,10 +10121,10 @@ def test_get_supported_languages_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_supported_languages(request) + client.get_adaptive_mt_dataset(request) -def test_get_supported_languages_rest_flattened(): +def test_get_adaptive_mt_dataset_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4117,16 +10133,16 @@ def test_get_supported_languages_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = translation_service.SupportedLanguages() + return_value = adaptive_mt.AdaptiveMtDataset() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - model="model_value", - display_language_code="display_language_code_value", + name="name_value", ) mock_args.update(sample_request) @@ -4134,25 +10150,25 @@ def test_get_supported_languages_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.SupportedLanguages.pb(return_value) + return_value = adaptive_mt.AdaptiveMtDataset.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_supported_languages(**mock_args) + client.get_adaptive_mt_dataset(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/supportedLanguages" + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*}" % client.transport._host, args[1], ) -def test_get_supported_languages_rest_flattened_error(transport: str = "rest"): +def test_get_adaptive_mt_dataset_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4160,16 +10176,14 @@ def test_get_supported_languages_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
- with pytest.raises(ValueError): - client.get_supported_languages( - translation_service.GetSupportedLanguagesRequest(), - parent="parent_value", - model="model_value", - display_language_code="display_language_code_value", + with pytest.raises(ValueError): + client.get_adaptive_mt_dataset( + adaptive_mt.GetAdaptiveMtDatasetRequest(), + name="name_value", ) -def test_get_supported_languages_rest_error(): +def test_get_adaptive_mt_dataset_rest_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4178,11 +10192,11 @@ def test_get_supported_languages_rest_error(): @pytest.mark.parametrize( "request_type", [ - translation_service.TranslateDocumentRequest, + adaptive_mt.ListAdaptiveMtDatasetsRequest, dict, ], ) -def test_translate_document_rest(request_type): +def test_list_adaptive_mt_datasets_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4195,34 +10209,33 @@ def test_translate_document_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = translation_service.TranslateDocumentResponse( - model="model_value", + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.TranslateDocumentResponse.pb(return_value) + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.translate_document(request) + response = client.list_adaptive_mt_datasets(request) # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.TranslateDocumentResponse) - assert response.model == "model_value" + assert isinstance(response, pagers.ListAdaptiveMtDatasetsPager) + assert response.next_page_token == "next_page_token_value" -def test_translate_document_rest_required_fields( - request_type=translation_service.TranslateDocumentRequest, +def test_list_adaptive_mt_datasets_rest_required_fields( + request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["target_language_code"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4237,24 +10250,29 @@ def test_translate_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).translate_document._get_unset_required_fields(jsonified_request) + ).list_adaptive_mt_datasets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - 
jsonified_request["targetLanguageCode"] = "target_language_code_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).translate_document._get_unset_required_fields(jsonified_request) + ).list_adaptive_mt_datasets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "targetLanguageCode" in jsonified_request - assert jsonified_request["targetLanguageCode"] == "target_language_code_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4263,7 +10281,7 @@ def test_translate_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = translation_service.TranslateDocumentResponse() + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4275,51 +10293,48 @@ def test_translate_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.TranslateDocumentResponse.pb( - return_value - ) + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.translate_document(request) + response = client.list_adaptive_mt_datasets(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_translate_document_rest_unset_required_fields(): +def test_list_adaptive_mt_datasets_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.translate_document._get_unset_required_fields({}) + unset_fields = transport.list_adaptive_mt_datasets._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "parent", - "targetLanguageCode", - "documentInputConfig", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_translate_document_rest_interceptors(null_interceptor): +def test_list_adaptive_mt_datasets_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-4332,14 +10347,14 @@ def test_translate_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_translate_document" + transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_datasets" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_translate_document" + transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_datasets" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.TranslateDocumentRequest.pb( - translation_service.TranslateDocumentRequest() + pb_message = adaptive_mt.ListAdaptiveMtDatasetsRequest.pb( + adaptive_mt.ListAdaptiveMtDatasetsRequest() ) transcode.return_value = { "method": "post", @@ -4351,21 +10366,19 @@ def test_translate_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - translation_service.TranslateDocumentResponse.to_json( - translation_service.TranslateDocumentResponse() - ) + req.return_value._content = adaptive_mt.ListAdaptiveMtDatasetsResponse.to_json( + adaptive_mt.ListAdaptiveMtDatasetsResponse() ) - request = translation_service.TranslateDocumentRequest() + request = adaptive_mt.ListAdaptiveMtDatasetsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = translation_service.TranslateDocumentResponse() + post.return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() - client.translate_document( + client.list_adaptive_mt_datasets( request, metadata=[ ("key", "val"), @@ -4377,8 +10390,8 @@ def test_translate_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_translate_document_rest_bad_request( - transport: str = "rest", 
request_type=translation_service.TranslateDocumentRequest +def test_list_adaptive_mt_datasets_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtDatasetsRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4398,23 +10411,137 @@ def test_translate_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.translate_document(request) + client.list_adaptive_mt_datasets(request) -def test_translate_document_rest_error(): +def test_list_adaptive_mt_datasets_rest_flattened(): client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtDatasetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_adaptive_mt_datasets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}/adaptiveMtDatasets" + % client.transport._host, + args[1], + ) + + +def test_list_adaptive_mt_datasets_rest_flattened_error(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_adaptive_mt_datasets( + adaptive_mt.ListAdaptiveMtDatasetsRequest(), + parent="parent_value", + ) + + +def test_list_adaptive_mt_datasets_rest_pager(transport: str = "rest"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtDatasetsResponse( + adaptive_mt_datasets=[ + adaptive_mt.AdaptiveMtDataset(), + adaptive_mt.AdaptiveMtDataset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + adaptive_mt.ListAdaptiveMtDatasetsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_adaptive_mt_datasets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtDataset) for i in results) + + pages = list(client.list_adaptive_mt_datasets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - translation_service.BatchTranslateTextRequest, + adaptive_mt.AdaptiveMtTranslateRequest, dict, ], ) -def test_batch_translate_text_rest(request_type): +def test_adaptive_mt_translate_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ 
-4427,30 +10554,35 @@ def test_batch_translate_text_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.AdaptiveMtTranslateResponse( + language_code="language_code_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_translate_text(request) + response = client.adaptive_mt_translate(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, adaptive_mt.AdaptiveMtTranslateResponse) + assert response.language_code == "language_code_value" -def test_batch_translate_text_rest_required_fields( - request_type=translation_service.BatchTranslateTextRequest, +def test_adaptive_mt_translate_rest_required_fields( + request_type=adaptive_mt.AdaptiveMtTranslateRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["source_language_code"] = "" - request_init["target_language_codes"] = "" + request_init["dataset"] = "" + request_init["content"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4465,27 +10597,27 @@ def test_batch_translate_text_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_text._get_unset_required_fields(jsonified_request) + 
).adaptive_mt_translate._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["sourceLanguageCode"] = "source_language_code_value" - jsonified_request["targetLanguageCodes"] = "target_language_codes_value" + jsonified_request["dataset"] = "dataset_value" + jsonified_request["content"] = "content_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_text._get_unset_required_fields(jsonified_request) + ).adaptive_mt_translate._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "sourceLanguageCode" in jsonified_request - assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" - assert "targetLanguageCodes" in jsonified_request - assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + assert "dataset" in jsonified_request + assert jsonified_request["dataset"] == "dataset_value" + assert "content" in jsonified_request + assert jsonified_request["content"] == "content_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4494,7 +10626,7 @@ def test_batch_translate_text_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.AdaptiveMtTranslateResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4514,40 +10646,41 @@ def test_batch_translate_text_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_translate_text(request) + response = client.adaptive_mt_translate(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_translate_text_rest_unset_required_fields(): +def test_adaptive_mt_translate_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.batch_translate_text._get_unset_required_fields({}) + unset_fields = transport.adaptive_mt_translate._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "sourceLanguageCode", - "targetLanguageCodes", - "inputConfigs", - "outputConfig", + "dataset", + "content", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_translate_text_rest_interceptors(null_interceptor): +def test_adaptive_mt_translate_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4560,16 +10693,14 @@ def test_batch_translate_text_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_batch_translate_text" + 
transports.TranslationServiceRestInterceptor, "post_adaptive_mt_translate" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_batch_translate_text" + transports.TranslationServiceRestInterceptor, "pre_adaptive_mt_translate" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.BatchTranslateTextRequest.pb( - translation_service.BatchTranslateTextRequest() + pb_message = adaptive_mt.AdaptiveMtTranslateRequest.pb( + adaptive_mt.AdaptiveMtTranslateRequest() ) transcode.return_value = { "method": "post", @@ -4581,55 +10712,114 @@ def test_batch_translate_text_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = adaptive_mt.AdaptiveMtTranslateResponse.to_json( + adaptive_mt.AdaptiveMtTranslateResponse() + ) + + request = adaptive_mt.AdaptiveMtTranslateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = adaptive_mt.AdaptiveMtTranslateResponse() + + client.adaptive_mt_translate( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_adaptive_mt_translate_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.AdaptiveMtTranslateRequest +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.adaptive_mt_translate(request) + + +def test_adaptive_mt_translate_rest_flattened(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = adaptive_mt.AdaptiveMtTranslateResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + content=["content_value"], ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtTranslateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - request = translation_service.BatchTranslateTextRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + client.adaptive_mt_translate(**mock_args) - client.batch_translate_text( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3/{parent=projects/*/locations/*}:adaptiveMtTranslate" + % client.transport._host, + args[1], ) - pre.assert_called_once() - post.assert_called_once() - -def test_batch_translate_text_rest_bad_request( - transport: str = "rest", request_type=translation_service.BatchTranslateTextRequest -): +def test_adaptive_mt_translate_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_translate_text(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.adaptive_mt_translate( + adaptive_mt.AdaptiveMtTranslateRequest(), + parent="parent_value", + content=["content_value"], + ) -def test_batch_translate_text_rest_error(): +def test_adaptive_mt_translate_rest_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4638,47 +10828,56 @@ def test_batch_translate_text_rest_error(): @pytest.mark.parametrize( "request_type", [ - translation_service.BatchTranslateDocumentRequest, + adaptive_mt.GetAdaptiveMtFileRequest, dict, ], ) -def test_batch_translate_document_rest(request_type): +def test_get_adaptive_mt_file_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.AdaptiveMtFile( + name="name_value", + display_name="display_name_value", + entry_count=1210, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_translate_document(request) + response = client.get_adaptive_mt_file(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, adaptive_mt.AdaptiveMtFile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.entry_count == 1210 -def test_batch_translate_document_rest_required_fields( - request_type=translation_service.BatchTranslateDocumentRequest, +def test_get_adaptive_mt_file_rest_required_fields( + request_type=adaptive_mt.GetAdaptiveMtFileRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["source_language_code"] = "" - request_init["target_language_codes"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4693,27 +10892,21 @@ def test_batch_translate_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_document._get_unset_required_fields(jsonified_request) + ).get_adaptive_mt_file._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["sourceLanguageCode"] = "source_language_code_value" - jsonified_request["targetLanguageCodes"] = "target_language_codes_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).batch_translate_document._get_unset_required_fields(jsonified_request) + ).get_adaptive_mt_file._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "sourceLanguageCode" in jsonified_request - assert jsonified_request["sourceLanguageCode"] == 
"source_language_code_value" - assert "targetLanguageCodes" in jsonified_request - assert jsonified_request["targetLanguageCodes"] == "target_language_codes_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4722,7 +10915,7 @@ def test_batch_translate_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.AdaptiveMtFile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4734,48 +10927,39 @@ def test_batch_translate_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.batch_translate_document(request) + response = client.get_adaptive_mt_file(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_batch_translate_document_rest_unset_required_fields(): +def test_get_adaptive_mt_file_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.batch_translate_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "sourceLanguageCode", - "targetLanguageCodes", - "inputConfigs", - "outputConfig", - ) - ) - ) + unset_fields = transport.get_adaptive_mt_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_translate_document_rest_interceptors(null_interceptor): +def test_get_adaptive_mt_file_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4788,16 +10972,14 @@ def test_batch_translate_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_batch_translate_document" + transports.TranslationServiceRestInterceptor, "post_get_adaptive_mt_file" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_batch_translate_document" + transports.TranslationServiceRestInterceptor, "pre_get_adaptive_mt_file" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.BatchTranslateDocumentRequest.pb( - translation_service.BatchTranslateDocumentRequest() + pb_message = adaptive_mt.GetAdaptiveMtFileRequest.pb( + adaptive_mt.GetAdaptiveMtFileRequest() ) transcode.return_value = { "method": "post", @@ -4809,19 +10991,19 @@ def test_batch_translate_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = adaptive_mt.AdaptiveMtFile.to_json( + 
adaptive_mt.AdaptiveMtFile() ) - request = translation_service.BatchTranslateDocumentRequest() + request = adaptive_mt.GetAdaptiveMtFileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = adaptive_mt.AdaptiveMtFile() - client.batch_translate_document( + client.get_adaptive_mt_file( request, metadata=[ ("key", "val"), @@ -4833,9 +11015,8 @@ def test_batch_translate_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_batch_translate_document_rest_bad_request( - transport: str = "rest", - request_type=translation_service.BatchTranslateDocumentRequest, +def test_get_adaptive_mt_file_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.GetAdaptiveMtFileRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4843,7 +11024,9 @@ def test_batch_translate_document_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4855,10 +11038,10 @@ def test_batch_translate_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_translate_document(request) + client.get_adaptive_mt_file(request) -def test_batch_translate_document_rest_flattened(): +def test_get_adaptive_mt_file_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4867,52 +11050,42 @@ def test_batch_translate_document_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.AdaptiveMtFile() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.AdaptiveMtFile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.batch_translate_document(**mock_args) + client.get_adaptive_mt_file(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}:batchTranslateDocument" + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}" % client.transport._host, args[1], ) -def test_batch_translate_document_rest_flattened_error(transport: str = "rest"): +def test_get_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4921,27 +11094,13 @@ def test_batch_translate_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.batch_translate_document( - translation_service.BatchTranslateDocumentRequest(), - parent="parent_value", - source_language_code="source_language_code_value", - target_language_codes=["target_language_codes_value"], - input_configs=[ - translation_service.BatchDocumentInputConfig( - gcs_source=translation_service.GcsSource( - input_uri="input_uri_value" - ) - ) - ], - output_config=translation_service.BatchDocumentOutputConfig( - gcs_destination=translation_service.GcsDestination( - output_uri_prefix="output_uri_prefix_value" - ) - ), + client.get_adaptive_mt_file( + adaptive_mt.GetAdaptiveMtFileRequest(), + name="name_value", ) -def test_batch_translate_document_rest_error(): +def test_get_adaptive_mt_file_rest_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4950,127 +11109,47 @@ def test_batch_translate_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - translation_service.CreateGlossaryRequest, + adaptive_mt.DeleteAdaptiveMtFileRequest, dict, ], ) -def test_create_glossary_rest(request_type): +def test_delete_adaptive_mt_file_rest(request_type): client = TranslationServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["glossary"] = { - "name": "name_value", - "language_pair": { - "source_language_code": "source_language_code_value", - "target_language_code": "target_language_code_value", - }, - "language_codes_set": { - "language_codes": ["language_codes_value1", "language_codes_value2"] - }, - "input_config": {"gcs_source": {"input_uri": "input_uri_value"}}, - "entry_count": 1210, - "submit_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "display_name": "display_name_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = translation_service.CreateGlossaryRequest.meta.fields["glossary"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["glossary"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["glossary"][field])): - del request_init["glossary"][field][i][subfield] - else: - del 
request_init["glossary"][field][subfield] + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_glossary(request) + response = client.delete_adaptive_mt_file(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert response is None -def test_create_glossary_rest_required_fields( - request_type=translation_service.CreateGlossaryRequest, +def test_delete_adaptive_mt_file_rest_required_fields( + request_type=adaptive_mt.DeleteAdaptiveMtFileRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5085,21 +11164,21 @@ def test_create_glossary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_glossary._get_unset_required_fields(jsonified_request) + ).delete_adaptive_mt_file._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_glossary._get_unset_required_fields(jsonified_request) + ).delete_adaptive_mt_file._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5108,7 +11187,7 @@ def test_create_glossary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5120,45 +11199,36 @@ def test_create_glossary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_glossary(request) + response = client.delete_adaptive_mt_file(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_glossary_rest_unset_required_fields(): +def test_delete_adaptive_mt_file_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_glossary._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "glossary", - ) - ) - ) + unset_fields = transport.delete_adaptive_mt_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_glossary_rest_interceptors(null_interceptor): +def test_delete_adaptive_mt_file_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5171,16 +11241,11 @@ def test_create_glossary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), 
mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_create_glossary" - ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_create_glossary" + transports.TranslationServiceRestInterceptor, "pre_delete_adaptive_mt_file" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = translation_service.CreateGlossaryRequest.pb( - translation_service.CreateGlossaryRequest() + pb_message = adaptive_mt.DeleteAdaptiveMtFileRequest.pb( + adaptive_mt.DeleteAdaptiveMtFileRequest() ) transcode.return_value = { "method": "post", @@ -5192,19 +11257,15 @@ def test_create_glossary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - request = translation_service.CreateGlossaryRequest() + request = adaptive_mt.DeleteAdaptiveMtFileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - client.create_glossary( + client.delete_adaptive_mt_file( request, metadata=[ ("key", "val"), @@ -5213,11 +11274,10 @@ def test_create_glossary_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_create_glossary_rest_bad_request( - transport: str = "rest", request_type=translation_service.CreateGlossaryRequest +def test_delete_adaptive_mt_file_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.DeleteAdaptiveMtFileRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5225,7 +11285,9 @@ def test_create_glossary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": 
"projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5237,10 +11299,10 @@ def test_create_glossary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_glossary(request) + client.delete_adaptive_mt_file(request) -def test_create_glossary_rest_flattened(): +def test_delete_adaptive_mt_file_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5249,38 +11311,40 @@ def test_create_glossary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_glossary(**mock_args) + client.delete_adaptive_mt_file(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + "%s/v3/{name=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}" + % client.transport._host, args[1], ) -def test_create_glossary_rest_flattened_error(transport: str = "rest"): +def test_delete_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5289,14 +11353,13 @@ def test_create_glossary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_glossary( - translation_service.CreateGlossaryRequest(), - parent="parent_value", - glossary=translation_service.Glossary(name="name_value"), + client.delete_adaptive_mt_file( + adaptive_mt.DeleteAdaptiveMtFileRequest(), + name="name_value", ) -def test_create_glossary_rest_error(): +def test_delete_adaptive_mt_file_rest_error(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5305,45 +11368,44 @@ def test_create_glossary_rest_error(): @pytest.mark.parametrize( "request_type", [ - translation_service.ListGlossariesRequest, + adaptive_mt.ImportAdaptiveMtFileRequest, dict, ], ) -def test_list_glossaries_rest(request_type): +def test_import_adaptive_mt_file_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = translation_service.ListGlossariesResponse( - next_page_token="next_page_token_value", - ) + return_value = adaptive_mt.ImportAdaptiveMtFileResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.ListGlossariesResponse.pb(return_value) + return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_glossaries(request) + response = client.import_adaptive_mt_file(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListGlossariesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, adaptive_mt.ImportAdaptiveMtFileResponse) -def test_list_glossaries_rest_required_fields( - request_type=translation_service.ListGlossariesRequest, +def test_import_adaptive_mt_file_rest_required_fields( + request_type=adaptive_mt.ImportAdaptiveMtFileRequest, ): transport_class = transports.TranslationServiceRestTransport @@ -5363,7 +11425,7 @@ def test_list_glossaries_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_glossaries._get_unset_required_fields(jsonified_request) + ).import_adaptive_mt_file._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5372,15 +11434,7 @@ def test_list_glossaries_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_glossaries._get_unset_required_fields(jsonified_request) - # 
Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).import_adaptive_mt_file._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5394,7 +11448,7 @@ def test_list_glossaries_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = translation_service.ListGlossariesResponse() + return_value = adaptive_mt.ImportAdaptiveMtFileResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5406,48 +11460,40 @@ def test_list_glossaries_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.ListGlossariesResponse.pb(return_value) + return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_glossaries(request) + response = client.import_adaptive_mt_file(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_glossaries_rest_unset_required_fields(): +def test_import_adaptive_mt_file_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - 
unset_fields = transport.list_glossaries._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.import_adaptive_mt_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_glossaries_rest_interceptors(null_interceptor): +def test_import_adaptive_mt_file_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5460,14 +11506,14 @@ def test_list_glossaries_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_list_glossaries" + transports.TranslationServiceRestInterceptor, "post_import_adaptive_mt_file" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_list_glossaries" + transports.TranslationServiceRestInterceptor, "pre_import_adaptive_mt_file" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.ListGlossariesRequest.pb( - translation_service.ListGlossariesRequest() + pb_message = adaptive_mt.ImportAdaptiveMtFileRequest.pb( + adaptive_mt.ImportAdaptiveMtFileRequest() ) transcode.return_value = { "method": "post", @@ -5479,19 +11525,19 @@ def test_list_glossaries_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.ListGlossariesResponse.to_json( - translation_service.ListGlossariesResponse() + req.return_value._content = adaptive_mt.ImportAdaptiveMtFileResponse.to_json( + adaptive_mt.ImportAdaptiveMtFileResponse() ) - request = translation_service.ListGlossariesRequest() + request = 
adaptive_mt.ImportAdaptiveMtFileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = translation_service.ListGlossariesResponse() + post.return_value = adaptive_mt.ImportAdaptiveMtFileResponse() - client.list_glossaries( + client.import_adaptive_mt_file( request, metadata=[ ("key", "val"), @@ -5503,8 +11549,8 @@ def test_list_glossaries_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_glossaries_rest_bad_request( - transport: str = "rest", request_type=translation_service.ListGlossariesRequest +def test_import_adaptive_mt_file_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ImportAdaptiveMtFileRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5512,7 +11558,9 @@ def test_list_glossaries_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5524,10 +11572,10 @@ def test_list_glossaries_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_glossaries(request) + client.import_adaptive_mt_file(request) -def test_list_glossaries_rest_flattened(): +def test_import_adaptive_mt_file_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5536,10 +11584,12 @@ def test_list_glossaries_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = translation_service.ListGlossariesResponse() + return_value = adaptive_mt.ImportAdaptiveMtFileResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -5551,24 +11601,25 @@ def test_list_glossaries_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.ListGlossariesResponse.pb(return_value) + return_value = adaptive_mt.ImportAdaptiveMtFileResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_glossaries(**mock_args) + client.import_adaptive_mt_file(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}:importAdaptiveMtFile" + % client.transport._host, args[1], ) -def test_list_glossaries_rest_flattened_error(transport: str = "rest"): +def test_import_adaptive_mt_file_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5577,126 +11628,67 @@ def test_list_glossaries_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_glossaries( - translation_service.ListGlossariesRequest(), + client.import_adaptive_mt_file( + adaptive_mt.ImportAdaptiveMtFileRequest(), parent="parent_value", ) -def test_list_glossaries_rest_pager(transport: str = "rest"): +def test_import_adaptive_mt_file_rest_error(): client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - translation_service.Glossary(), - ], - next_page_token="abc", - ), - translation_service.ListGlossariesResponse( - glossaries=[], - next_page_token="def", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - ], - next_page_token="ghi", - ), - translation_service.ListGlossariesResponse( - glossaries=[ - translation_service.Glossary(), - translation_service.Glossary(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - translation_service.ListGlossariesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_glossaries(request=sample_request) - - results = list(pager) - assert 
len(results) == 6 - assert all(isinstance(i, translation_service.Glossary) for i in results) - - pages = list(client.list_glossaries(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - translation_service.GetGlossaryRequest, + adaptive_mt.ListAdaptiveMtFilesRequest, dict, ], ) -def test_get_glossary_rest(request_type): +def test_list_adaptive_mt_files_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = translation_service.Glossary( - name="name_value", - entry_count=1210, - display_name="display_name_value", + return_value = adaptive_mt.ListAdaptiveMtFilesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.Glossary.pb(return_value) + return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_glossary(request) + response = client.list_adaptive_mt_files(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, translation_service.Glossary) - assert response.name == "name_value" - assert response.entry_count == 1210 - assert response.display_name == "display_name_value" + assert isinstance(response, pagers.ListAdaptiveMtFilesPager) + assert response.next_page_token == "next_page_token_value" -def test_get_glossary_rest_required_fields( - request_type=translation_service.GetGlossaryRequest, +def test_list_adaptive_mt_files_rest_required_fields( + request_type=adaptive_mt.ListAdaptiveMtFilesRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5711,21 +11703,28 @@ def test_get_glossary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_glossary._get_unset_required_fields(jsonified_request) + ).list_adaptive_mt_files._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_glossary._get_unset_required_fields(jsonified_request) + ).list_adaptive_mt_files._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5734,7 +11733,7 @@ def test_get_glossary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = translation_service.Glossary() + return_value = adaptive_mt.ListAdaptiveMtFilesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5755,30 +11754,38 @@ def test_get_glossary_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.Glossary.pb(return_value) + return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_glossary(request) + response = client.list_adaptive_mt_files(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_glossary_rest_unset_required_fields(): +def test_list_adaptive_mt_files_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + unset_fields = 
transport.list_adaptive_mt_files._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_glossary_rest_interceptors(null_interceptor): +def test_list_adaptive_mt_files_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5791,14 +11798,14 @@ def test_get_glossary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_get_glossary" + transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_files" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_get_glossary" + transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_files" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.GetGlossaryRequest.pb( - translation_service.GetGlossaryRequest() + pb_message = adaptive_mt.ListAdaptiveMtFilesRequest.pb( + adaptive_mt.ListAdaptiveMtFilesRequest() ) transcode.return_value = { "method": "post", @@ -5810,19 +11817,19 @@ def test_get_glossary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = translation_service.Glossary.to_json( - translation_service.Glossary() + req.return_value._content = adaptive_mt.ListAdaptiveMtFilesResponse.to_json( + adaptive_mt.ListAdaptiveMtFilesResponse() ) - request = translation_service.GetGlossaryRequest() + request = adaptive_mt.ListAdaptiveMtFilesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = translation_service.Glossary() + post.return_value = 
adaptive_mt.ListAdaptiveMtFilesResponse() - client.get_glossary( + client.list_adaptive_mt_files( request, metadata=[ ("key", "val"), @@ -5834,8 +11841,8 @@ def test_get_glossary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_glossary_rest_bad_request( - transport: str = "rest", request_type=translation_service.GetGlossaryRequest +def test_list_adaptive_mt_files_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtFilesRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5843,7 +11850,9 @@ def test_get_glossary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5855,10 +11864,10 @@ def test_get_glossary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_glossary(request) + client.list_adaptive_mt_files(request) -def test_get_glossary_rest_flattened(): +def test_list_adaptive_mt_files_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5867,16 +11876,16 @@ def test_get_glossary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = translation_service.Glossary() + return_value = adaptive_mt.ListAdaptiveMtFilesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/glossaries/sample3" + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -5884,24 +11893,25 @@ def test_get_glossary_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = translation_service.Glossary.pb(return_value) + return_value = adaptive_mt.ListAdaptiveMtFilesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_glossary(**mock_args) + client.list_adaptive_mt_files(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*}/adaptiveMtFiles" + % client.transport._host, args[1], ) -def test_get_glossary_rest_flattened_error(transport: str = "rest"): +def test_list_adaptive_mt_files_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5910,60 +11920,126 @@ def test_get_glossary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_glossary( - translation_service.GetGlossaryRequest(), - name="name_value", + client.list_adaptive_mt_files( + adaptive_mt.ListAdaptiveMtFilesRequest(), + parent="parent_value", ) -def test_get_glossary_rest_error(): +def test_list_adaptive_mt_files_rest_pager(transport: str = "rest"): client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtFilesResponse( + adaptive_mt_files=[ + adaptive_mt.AdaptiveMtFile(), + adaptive_mt.AdaptiveMtFile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + adaptive_mt.ListAdaptiveMtFilesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3" + } + + pager = 
client.list_adaptive_mt_files(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtFile) for i in results) + + pages = list(client.list_adaptive_mt_files(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - translation_service.DeleteGlossaryRequest, + adaptive_mt.ListAdaptiveMtSentencesRequest, dict, ], ) -def test_delete_glossary_rest(request_type): +def test_list_adaptive_mt_sentences_rest(request_type): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_glossary(request) + response = client.list_adaptive_mt_sentences(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListAdaptiveMtSentencesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_glossary_rest_required_fields( - request_type=translation_service.DeleteGlossaryRequest, +def test_list_adaptive_mt_sentences_rest_required_fields( + request_type=adaptive_mt.ListAdaptiveMtSentencesRequest, ): transport_class = transports.TranslationServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5978,21 +12054,28 @@ def test_delete_glossary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_glossary._get_unset_required_fields(jsonified_request) + ).list_adaptive_mt_sentences._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_glossary._get_unset_required_fields(jsonified_request) + ).list_adaptive_mt_sentences._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6001,7 +12084,7 @@ def test_delete_glossary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6013,36 +12096,47 @@ def test_delete_glossary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_glossary(request) + response = client.list_adaptive_mt_sentences(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_glossary_rest_unset_required_fields(): +def test_list_adaptive_mt_sentences_rest_unset_required_fields(): transport = transports.TranslationServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_glossary._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_adaptive_mt_sentences._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_glossary_rest_interceptors(null_interceptor): +def test_list_adaptive_mt_sentences_rest_interceptors(null_interceptor): transport = transports.TranslationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6055,16 +12149,14 @@ def test_delete_glossary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.TranslationServiceRestInterceptor, "post_delete_glossary" + transports.TranslationServiceRestInterceptor, "post_list_adaptive_mt_sentences" ) as post, mock.patch.object( - transports.TranslationServiceRestInterceptor, "pre_delete_glossary" + transports.TranslationServiceRestInterceptor, "pre_list_adaptive_mt_sentences" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = translation_service.DeleteGlossaryRequest.pb( - translation_service.DeleteGlossaryRequest() + pb_message = adaptive_mt.ListAdaptiveMtSentencesRequest.pb( + adaptive_mt.ListAdaptiveMtSentencesRequest() ) transcode.return_value = { "method": "post", @@ -6076,19 +12168,19 @@ def test_delete_glossary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = adaptive_mt.ListAdaptiveMtSentencesResponse.to_json( + 
adaptive_mt.ListAdaptiveMtSentencesResponse() ) - request = translation_service.DeleteGlossaryRequest() + request = adaptive_mt.ListAdaptiveMtSentencesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() - client.delete_glossary( + client.list_adaptive_mt_sentences( request, metadata=[ ("key", "val"), @@ -6100,8 +12192,8 @@ def test_delete_glossary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_glossary_rest_bad_request( - transport: str = "rest", request_type=translation_service.DeleteGlossaryRequest +def test_list_adaptive_mt_sentences_rest_bad_request( + transport: str = "rest", request_type=adaptive_mt.ListAdaptiveMtSentencesRequest ): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6109,7 +12201,9 @@ def test_delete_glossary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6121,10 +12215,10 @@ def test_delete_glossary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_glossary(request) + client.list_adaptive_mt_sentences(request) -def test_delete_glossary_rest_flattened(): +def test_list_adaptive_mt_sentences_rest_flattened(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6133,39 +12227,42 @@ def test_delete_glossary_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/glossaries/sample3" + "parent": "projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = adaptive_mt.ListAdaptiveMtSentencesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_glossary(**mock_args) + client.list_adaptive_mt_sentences(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v3/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + "%s/v3/{parent=projects/*/locations/*/adaptiveMtDatasets/*/adaptiveMtFiles/*}/adaptiveMtSentences" + % client.transport._host, args[1], ) -def test_delete_glossary_rest_flattened_error(transport: str = "rest"): +def test_list_adaptive_mt_sentences_rest_flattened_error(transport: str = "rest"): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6174,17 +12271,76 @@ def test_delete_glossary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_glossary( - translation_service.DeleteGlossaryRequest(), - name="name_value", + client.list_adaptive_mt_sentences( + adaptive_mt.ListAdaptiveMtSentencesRequest(), + parent="parent_value", ) -def test_delete_glossary_rest_error(): +def test_list_adaptive_mt_sentences_rest_pager(transport: str = "rest"): client = TranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="abc", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[], + next_page_token="def", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + ], + next_page_token="ghi", + ), + adaptive_mt.ListAdaptiveMtSentencesResponse( + adaptive_mt_sentences=[ + adaptive_mt.AdaptiveMtSentence(), + adaptive_mt.AdaptiveMtSentence(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + adaptive_mt.ListAdaptiveMtSentencesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": 
"projects/sample1/locations/sample2/adaptiveMtDatasets/sample3/adaptiveMtFiles/sample4" + } + + pager = client.list_adaptive_mt_sentences(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, adaptive_mt.AdaptiveMtSentence) for i in results) + + pages = list(client.list_adaptive_mt_sentences(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -6335,6 +12491,16 @@ def test_translation_service_base_transport(): "list_glossaries", "get_glossary", "delete_glossary", + "create_adaptive_mt_dataset", + "delete_adaptive_mt_dataset", + "get_adaptive_mt_dataset", + "list_adaptive_mt_datasets", + "adaptive_mt_translate", + "get_adaptive_mt_file", + "delete_adaptive_mt_file", + "import_adaptive_mt_file", + "list_adaptive_mt_files", + "list_adaptive_mt_sentences", ) for method in methods: with pytest.raises(NotImplementedError): @@ -6659,6 +12825,36 @@ def test_translation_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_glossary._session session2 = client2.transport.delete_glossary._session assert session1 != session2 + session1 = client1.transport.create_adaptive_mt_dataset._session + session2 = client2.transport.create_adaptive_mt_dataset._session + assert session1 != session2 + session1 = client1.transport.delete_adaptive_mt_dataset._session + session2 = client2.transport.delete_adaptive_mt_dataset._session + assert session1 != session2 + session1 = client1.transport.get_adaptive_mt_dataset._session + session2 = client2.transport.get_adaptive_mt_dataset._session + assert session1 != session2 + session1 = client1.transport.list_adaptive_mt_datasets._session + session2 = client2.transport.list_adaptive_mt_datasets._session + assert session1 != session2 + session1 = 
client1.transport.adaptive_mt_translate._session + session2 = client2.transport.adaptive_mt_translate._session + assert session1 != session2 + session1 = client1.transport.get_adaptive_mt_file._session + session2 = client2.transport.get_adaptive_mt_file._session + assert session1 != session2 + session1 = client1.transport.delete_adaptive_mt_file._session + session2 = client2.transport.delete_adaptive_mt_file._session + assert session1 != session2 + session1 = client1.transport.import_adaptive_mt_file._session + session2 = client2.transport.import_adaptive_mt_file._session + assert session1 != session2 + session1 = client1.transport.list_adaptive_mt_files._session + session2 = client2.transport.list_adaptive_mt_files._session + assert session1 != session2 + session1 = client1.transport.list_adaptive_mt_sentences._session + session2 = client2.transport.list_adaptive_mt_sentences._session + assert session1 != session2 def test_translation_service_grpc_transport_channel(): @@ -6821,6 +13017,101 @@ def test_translation_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_adaptive_mt_dataset_path(): + project = "squid" + location = "clam" + dataset = "whelk" + expected = ( + "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}".format( + project=project, + location=location, + dataset=dataset, + ) + ) + actual = TranslationServiceClient.adaptive_mt_dataset_path( + project, location, dataset + ) + assert expected == actual + + +def test_parse_adaptive_mt_dataset_path(): + expected = { + "project": "octopus", + "location": "oyster", + "dataset": "nudibranch", + } + path = TranslationServiceClient.adaptive_mt_dataset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranslationServiceClient.parse_adaptive_mt_dataset_path(path) + assert expected == actual + + +def test_adaptive_mt_file_path(): + project = "cuttlefish" + location = "mussel" + dataset = "winkle" + file = "nautilus" + expected = "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}".format( + project=project, + location=location, + dataset=dataset, + file=file, + ) + actual = TranslationServiceClient.adaptive_mt_file_path( + project, location, dataset, file + ) + assert expected == actual + + +def test_parse_adaptive_mt_file_path(): + expected = { + "project": "scallop", + "location": "abalone", + "dataset": "squid", + "file": "clam", + } + path = TranslationServiceClient.adaptive_mt_file_path(**expected) + + # Check that the path construction is reversible. + actual = TranslationServiceClient.parse_adaptive_mt_file_path(path) + assert expected == actual + + +def test_adaptive_mt_sentence_path(): + project = "whelk" + location = "octopus" + dataset = "oyster" + file = "nudibranch" + sentence = "cuttlefish" + expected = "projects/{project}/locations/{location}/adaptiveMtDatasets/{dataset}/adaptiveMtFiles/{file}/adaptiveMtSentences/{sentence}".format( + project=project, + location=location, + dataset=dataset, + file=file, + sentence=sentence, + ) + actual = TranslationServiceClient.adaptive_mt_sentence_path( + project, location, dataset, file, sentence + ) + assert expected == actual + + +def test_parse_adaptive_mt_sentence_path(): + expected = { + "project": "mussel", + "location": "winkle", + "dataset": "nautilus", + "file": "scallop", + "sentence": "abalone", + } + path = TranslationServiceClient.adaptive_mt_sentence_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranslationServiceClient.parse_adaptive_mt_sentence_path(path) + assert expected == actual + + def test_glossary_path(): project = "squid" location = "clam" diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py index 1c98eacc1adb..e728b8570084 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py @@ -26,14 +26,6 @@ import json import math -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers -from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response -from requests.sessions import Session - from google.api_core import ( future, gapic_v1, @@ -49,6 +41,18 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + from google.cloud.translate_v3beta1.services.translation_service import ( TranslationServiceAsyncClient, TranslationServiceClient, @@ -56,10 +60,6 @@ transports, ) from google.cloud.translate_v3beta1.types import translation_service -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import json_format -from google.protobuf 
import timestamp_pb2 # type: ignore def client_cert_source_callback(): diff --git a/packages/google-cloud-translate/tests/unit/v2/test_client.py b/packages/google-cloud-translate/tests/unit/v2/test_client.py index d032fe1baa92..0a33d0406e7a 100644 --- a/packages/google-cloud-translate/tests/unit/v2/test_client.py +++ b/packages/google-cloud-translate/tests/unit/v2/test_client.py @@ -27,6 +27,7 @@ def _make_one(self, *args, **kw): def test_constructor_defaults(self): from google.cloud._http import ClientInfo + from google.cloud.translate_v2._http import Connection from google.cloud.translate_v2.client import ENGLISH_ISO_639 @@ -40,6 +41,7 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): from google.cloud._http import ClientInfo + from google.cloud.translate_v2._http import Connection http = object() diff --git a/packages/google-cloud-vmwareengine/CHANGELOG.md b/packages/google-cloud-vmwareengine/CHANGELOG.md index ac06f6941465..85a7d408887e 100644 --- a/packages/google-cloud-vmwareengine/CHANGELOG.md +++ b/packages/google-cloud-vmwareengine/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-vmwareengine-v1.2.0...google-cloud-vmwareengine-v1.3.0) (2024-01-19) + + +### Features + +* [google-cloud-vmwareengine] Adding ManagementDnsZoneBinding, DnsBindPermission, DnsForwarding, ExternalAccessRule, ExternalAddress, LoggingServer, NetworkPeering, Node and stretched PC features ([#12207](https://github.com/googleapis/google-cloud-python/issues/12207)) ([d18cf96](https://github.com/googleapis/google-cloud-python/commit/d18cf9674cb1d3a07cadff016f7d8ead22f194ca)) + + +### Documentation + +* clarified wording around private cloud and update cluster ([d18cf96](https://github.com/googleapis/google-cloud-python/commit/d18cf9674cb1d3a07cadff016f7d8ead22f194ca)) + ## 
[1.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-vmwareengine-v1.1.2...google-cloud-vmwareengine-v1.2.0) (2023-12-07) diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py index 91031cad6afc..f9f0a06d6742 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/__init__.py @@ -26,32 +26,67 @@ ) from google.cloud.vmwareengine_v1.types.vmwareengine import ( CreateClusterRequest, + CreateExternalAccessRuleRequest, + CreateExternalAddressRequest, CreateHcxActivationKeyRequest, + CreateLoggingServerRequest, + CreateManagementDnsZoneBindingRequest, + CreateNetworkPeeringRequest, CreateNetworkPolicyRequest, CreatePrivateCloudRequest, CreatePrivateConnectionRequest, CreateVmwareEngineNetworkRequest, DeleteClusterRequest, + DeleteExternalAccessRuleRequest, + DeleteExternalAddressRequest, + DeleteLoggingServerRequest, + DeleteManagementDnsZoneBindingRequest, + DeleteNetworkPeeringRequest, DeleteNetworkPolicyRequest, DeletePrivateCloudRequest, DeletePrivateConnectionRequest, DeleteVmwareEngineNetworkRequest, + FetchNetworkPolicyExternalAddressesRequest, + FetchNetworkPolicyExternalAddressesResponse, GetClusterRequest, + GetDnsBindPermissionRequest, + GetDnsForwardingRequest, + GetExternalAccessRuleRequest, + GetExternalAddressRequest, GetHcxActivationKeyRequest, + GetLoggingServerRequest, + GetManagementDnsZoneBindingRequest, + GetNetworkPeeringRequest, GetNetworkPolicyRequest, + GetNodeRequest, GetNodeTypeRequest, GetPrivateCloudRequest, GetPrivateConnectionRequest, GetSubnetRequest, GetVmwareEngineNetworkRequest, + GrantDnsBindPermissionRequest, ListClustersRequest, ListClustersResponse, + ListExternalAccessRulesRequest, + ListExternalAccessRulesResponse, + ListExternalAddressesRequest, + ListExternalAddressesResponse, 
ListHcxActivationKeysRequest, ListHcxActivationKeysResponse, + ListLoggingServersRequest, + ListLoggingServersResponse, + ListManagementDnsZoneBindingsRequest, + ListManagementDnsZoneBindingsResponse, + ListNetworkPeeringsRequest, + ListNetworkPeeringsResponse, ListNetworkPoliciesRequest, ListNetworkPoliciesResponse, + ListNodesRequest, + ListNodesResponse, ListNodeTypesRequest, ListNodeTypesResponse, + ListPeeringRoutesRequest, + ListPeeringRoutesResponse, ListPrivateCloudsRequest, ListPrivateCloudsResponse, ListPrivateConnectionPeeringRoutesRequest, @@ -63,12 +98,20 @@ ListVmwareEngineNetworksRequest, ListVmwareEngineNetworksResponse, OperationMetadata, + RepairManagementDnsZoneBindingRequest, ResetNsxCredentialsRequest, ResetVcenterCredentialsRequest, + RevokeDnsBindPermissionRequest, ShowNsxCredentialsRequest, ShowVcenterCredentialsRequest, UndeletePrivateCloudRequest, UpdateClusterRequest, + UpdateDnsForwardingRequest, + UpdateExternalAccessRuleRequest, + UpdateExternalAddressRequest, + UpdateLoggingServerRequest, + UpdateManagementDnsZoneBindingRequest, + UpdateNetworkPeeringRequest, UpdateNetworkPolicyRequest, UpdatePrivateCloudRequest, UpdatePrivateConnectionRequest, @@ -78,16 +121,27 @@ from google.cloud.vmwareengine_v1.types.vmwareengine_resources import ( Cluster, Credentials, + DnsBindPermission, + DnsForwarding, + ExternalAccessRule, + ExternalAddress, Hcx, HcxActivationKey, + LocationMetadata, + LoggingServer, + ManagementDnsZoneBinding, NetworkConfig, + NetworkPeering, NetworkPolicy, + Node, NodeType, NodeTypeConfig, Nsx, PeeringRoute, + Principal, PrivateCloud, PrivateConnection, + StretchedClusterConfig, Subnet, Vcenter, VmwareEngineNetwork, @@ -97,32 +151,67 @@ "VmwareEngineClient", "VmwareEngineAsyncClient", "CreateClusterRequest", + "CreateExternalAccessRuleRequest", + "CreateExternalAddressRequest", "CreateHcxActivationKeyRequest", + "CreateLoggingServerRequest", + "CreateManagementDnsZoneBindingRequest", + "CreateNetworkPeeringRequest", 
"CreateNetworkPolicyRequest", "CreatePrivateCloudRequest", "CreatePrivateConnectionRequest", "CreateVmwareEngineNetworkRequest", "DeleteClusterRequest", + "DeleteExternalAccessRuleRequest", + "DeleteExternalAddressRequest", + "DeleteLoggingServerRequest", + "DeleteManagementDnsZoneBindingRequest", + "DeleteNetworkPeeringRequest", "DeleteNetworkPolicyRequest", "DeletePrivateCloudRequest", "DeletePrivateConnectionRequest", "DeleteVmwareEngineNetworkRequest", + "FetchNetworkPolicyExternalAddressesRequest", + "FetchNetworkPolicyExternalAddressesResponse", "GetClusterRequest", + "GetDnsBindPermissionRequest", + "GetDnsForwardingRequest", + "GetExternalAccessRuleRequest", + "GetExternalAddressRequest", "GetHcxActivationKeyRequest", + "GetLoggingServerRequest", + "GetManagementDnsZoneBindingRequest", + "GetNetworkPeeringRequest", "GetNetworkPolicyRequest", + "GetNodeRequest", "GetNodeTypeRequest", "GetPrivateCloudRequest", "GetPrivateConnectionRequest", "GetSubnetRequest", "GetVmwareEngineNetworkRequest", + "GrantDnsBindPermissionRequest", "ListClustersRequest", "ListClustersResponse", + "ListExternalAccessRulesRequest", + "ListExternalAccessRulesResponse", + "ListExternalAddressesRequest", + "ListExternalAddressesResponse", "ListHcxActivationKeysRequest", "ListHcxActivationKeysResponse", + "ListLoggingServersRequest", + "ListLoggingServersResponse", + "ListManagementDnsZoneBindingsRequest", + "ListManagementDnsZoneBindingsResponse", + "ListNetworkPeeringsRequest", + "ListNetworkPeeringsResponse", "ListNetworkPoliciesRequest", "ListNetworkPoliciesResponse", + "ListNodesRequest", + "ListNodesResponse", "ListNodeTypesRequest", "ListNodeTypesResponse", + "ListPeeringRoutesRequest", + "ListPeeringRoutesResponse", "ListPrivateCloudsRequest", "ListPrivateCloudsResponse", "ListPrivateConnectionPeeringRoutesRequest", @@ -134,12 +223,20 @@ "ListVmwareEngineNetworksRequest", "ListVmwareEngineNetworksResponse", "OperationMetadata", + "RepairManagementDnsZoneBindingRequest", 
"ResetNsxCredentialsRequest", "ResetVcenterCredentialsRequest", + "RevokeDnsBindPermissionRequest", "ShowNsxCredentialsRequest", "ShowVcenterCredentialsRequest", "UndeletePrivateCloudRequest", "UpdateClusterRequest", + "UpdateDnsForwardingRequest", + "UpdateExternalAccessRuleRequest", + "UpdateExternalAddressRequest", + "UpdateLoggingServerRequest", + "UpdateManagementDnsZoneBindingRequest", + "UpdateNetworkPeeringRequest", "UpdateNetworkPolicyRequest", "UpdatePrivateCloudRequest", "UpdatePrivateConnectionRequest", @@ -147,16 +244,27 @@ "UpdateVmwareEngineNetworkRequest", "Cluster", "Credentials", + "DnsBindPermission", + "DnsForwarding", + "ExternalAccessRule", + "ExternalAddress", "Hcx", "HcxActivationKey", + "LocationMetadata", + "LoggingServer", + "ManagementDnsZoneBinding", "NetworkConfig", + "NetworkPeering", "NetworkPolicy", + "Node", "NodeType", "NodeTypeConfig", "Nsx", "PeeringRoute", + "Principal", "PrivateCloud", "PrivateConnection", + "StretchedClusterConfig", "Subnet", "Vcenter", "VmwareEngineNetwork", diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py index 2968f1104f7a..5f7437f6f8d0 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.2.0" # {x-release-please-version} +__version__ = "1.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py index 4d790a57255d..839e73dfbc19 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/__init__.py @@ -21,32 +21,67 @@ from .services.vmware_engine import VmwareEngineAsyncClient, VmwareEngineClient from .types.vmwareengine import ( CreateClusterRequest, + CreateExternalAccessRuleRequest, + CreateExternalAddressRequest, CreateHcxActivationKeyRequest, + CreateLoggingServerRequest, + CreateManagementDnsZoneBindingRequest, + CreateNetworkPeeringRequest, CreateNetworkPolicyRequest, CreatePrivateCloudRequest, CreatePrivateConnectionRequest, CreateVmwareEngineNetworkRequest, DeleteClusterRequest, + DeleteExternalAccessRuleRequest, + DeleteExternalAddressRequest, + DeleteLoggingServerRequest, + DeleteManagementDnsZoneBindingRequest, + DeleteNetworkPeeringRequest, DeleteNetworkPolicyRequest, DeletePrivateCloudRequest, DeletePrivateConnectionRequest, DeleteVmwareEngineNetworkRequest, + FetchNetworkPolicyExternalAddressesRequest, + FetchNetworkPolicyExternalAddressesResponse, GetClusterRequest, + GetDnsBindPermissionRequest, + GetDnsForwardingRequest, + GetExternalAccessRuleRequest, + GetExternalAddressRequest, GetHcxActivationKeyRequest, + GetLoggingServerRequest, + GetManagementDnsZoneBindingRequest, + GetNetworkPeeringRequest, GetNetworkPolicyRequest, + GetNodeRequest, GetNodeTypeRequest, GetPrivateCloudRequest, GetPrivateConnectionRequest, GetSubnetRequest, GetVmwareEngineNetworkRequest, + GrantDnsBindPermissionRequest, ListClustersRequest, ListClustersResponse, + ListExternalAccessRulesRequest, + ListExternalAccessRulesResponse, + ListExternalAddressesRequest, + ListExternalAddressesResponse, 
ListHcxActivationKeysRequest, ListHcxActivationKeysResponse, + ListLoggingServersRequest, + ListLoggingServersResponse, + ListManagementDnsZoneBindingsRequest, + ListManagementDnsZoneBindingsResponse, + ListNetworkPeeringsRequest, + ListNetworkPeeringsResponse, ListNetworkPoliciesRequest, ListNetworkPoliciesResponse, + ListNodesRequest, + ListNodesResponse, ListNodeTypesRequest, ListNodeTypesResponse, + ListPeeringRoutesRequest, + ListPeeringRoutesResponse, ListPrivateCloudsRequest, ListPrivateCloudsResponse, ListPrivateConnectionPeeringRoutesRequest, @@ -58,12 +93,20 @@ ListVmwareEngineNetworksRequest, ListVmwareEngineNetworksResponse, OperationMetadata, + RepairManagementDnsZoneBindingRequest, ResetNsxCredentialsRequest, ResetVcenterCredentialsRequest, + RevokeDnsBindPermissionRequest, ShowNsxCredentialsRequest, ShowVcenterCredentialsRequest, UndeletePrivateCloudRequest, UpdateClusterRequest, + UpdateDnsForwardingRequest, + UpdateExternalAccessRuleRequest, + UpdateExternalAddressRequest, + UpdateLoggingServerRequest, + UpdateManagementDnsZoneBindingRequest, + UpdateNetworkPeeringRequest, UpdateNetworkPolicyRequest, UpdatePrivateCloudRequest, UpdatePrivateConnectionRequest, @@ -73,16 +116,27 @@ from .types.vmwareengine_resources import ( Cluster, Credentials, + DnsBindPermission, + DnsForwarding, + ExternalAccessRule, + ExternalAddress, Hcx, HcxActivationKey, + LocationMetadata, + LoggingServer, + ManagementDnsZoneBinding, NetworkConfig, + NetworkPeering, NetworkPolicy, + Node, NodeType, NodeTypeConfig, Nsx, PeeringRoute, + Principal, PrivateCloud, PrivateConnection, + StretchedClusterConfig, Subnet, Vcenter, VmwareEngineNetwork, @@ -92,35 +146,74 @@ "VmwareEngineAsyncClient", "Cluster", "CreateClusterRequest", + "CreateExternalAccessRuleRequest", + "CreateExternalAddressRequest", "CreateHcxActivationKeyRequest", + "CreateLoggingServerRequest", + "CreateManagementDnsZoneBindingRequest", + "CreateNetworkPeeringRequest", "CreateNetworkPolicyRequest", 
"CreatePrivateCloudRequest", "CreatePrivateConnectionRequest", "CreateVmwareEngineNetworkRequest", "Credentials", "DeleteClusterRequest", + "DeleteExternalAccessRuleRequest", + "DeleteExternalAddressRequest", + "DeleteLoggingServerRequest", + "DeleteManagementDnsZoneBindingRequest", + "DeleteNetworkPeeringRequest", "DeleteNetworkPolicyRequest", "DeletePrivateCloudRequest", "DeletePrivateConnectionRequest", "DeleteVmwareEngineNetworkRequest", + "DnsBindPermission", + "DnsForwarding", + "ExternalAccessRule", + "ExternalAddress", + "FetchNetworkPolicyExternalAddressesRequest", + "FetchNetworkPolicyExternalAddressesResponse", "GetClusterRequest", + "GetDnsBindPermissionRequest", + "GetDnsForwardingRequest", + "GetExternalAccessRuleRequest", + "GetExternalAddressRequest", "GetHcxActivationKeyRequest", + "GetLoggingServerRequest", + "GetManagementDnsZoneBindingRequest", + "GetNetworkPeeringRequest", "GetNetworkPolicyRequest", + "GetNodeRequest", "GetNodeTypeRequest", "GetPrivateCloudRequest", "GetPrivateConnectionRequest", "GetSubnetRequest", "GetVmwareEngineNetworkRequest", + "GrantDnsBindPermissionRequest", "Hcx", "HcxActivationKey", "ListClustersRequest", "ListClustersResponse", + "ListExternalAccessRulesRequest", + "ListExternalAccessRulesResponse", + "ListExternalAddressesRequest", + "ListExternalAddressesResponse", "ListHcxActivationKeysRequest", "ListHcxActivationKeysResponse", + "ListLoggingServersRequest", + "ListLoggingServersResponse", + "ListManagementDnsZoneBindingsRequest", + "ListManagementDnsZoneBindingsResponse", + "ListNetworkPeeringsRequest", + "ListNetworkPeeringsResponse", "ListNetworkPoliciesRequest", "ListNetworkPoliciesResponse", "ListNodeTypesRequest", "ListNodeTypesResponse", + "ListNodesRequest", + "ListNodesResponse", + "ListPeeringRoutesRequest", + "ListPeeringRoutesResponse", "ListPrivateCloudsRequest", "ListPrivateCloudsResponse", "ListPrivateConnectionPeeringRoutesRequest", @@ -131,22 +224,37 @@ "ListSubnetsResponse", 
"ListVmwareEngineNetworksRequest", "ListVmwareEngineNetworksResponse", + "LocationMetadata", + "LoggingServer", + "ManagementDnsZoneBinding", "NetworkConfig", + "NetworkPeering", "NetworkPolicy", + "Node", "NodeType", "NodeTypeConfig", "Nsx", "OperationMetadata", "PeeringRoute", + "Principal", "PrivateCloud", "PrivateConnection", + "RepairManagementDnsZoneBindingRequest", "ResetNsxCredentialsRequest", "ResetVcenterCredentialsRequest", + "RevokeDnsBindPermissionRequest", "ShowNsxCredentialsRequest", "ShowVcenterCredentialsRequest", + "StretchedClusterConfig", "Subnet", "UndeletePrivateCloudRequest", "UpdateClusterRequest", + "UpdateDnsForwardingRequest", + "UpdateExternalAccessRuleRequest", + "UpdateExternalAddressRequest", + "UpdateLoggingServerRequest", + "UpdateManagementDnsZoneBindingRequest", + "UpdateNetworkPeeringRequest", "UpdateNetworkPolicyRequest", "UpdatePrivateCloudRequest", "UpdatePrivateConnectionRequest", diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_metadata.json b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_metadata.json index 080ab9e4e5b6..65b47d5cfa55 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_metadata.json +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_metadata.json @@ -15,11 +15,36 @@ "create_cluster" ] }, + "CreateExternalAccessRule": { + "methods": [ + "create_external_access_rule" + ] + }, + "CreateExternalAddress": { + "methods": [ + "create_external_address" + ] + }, "CreateHcxActivationKey": { "methods": [ "create_hcx_activation_key" ] }, + "CreateLoggingServer": { + "methods": [ + "create_logging_server" + ] + }, + "CreateManagementDnsZoneBinding": { + "methods": [ + "create_management_dns_zone_binding" + ] + }, + "CreateNetworkPeering": { + "methods": [ + "create_network_peering" + ] + }, "CreateNetworkPolicy": { "methods": [ "create_network_policy" @@ -45,6 +70,31 @@ "delete_cluster" ] }, + 
"DeleteExternalAccessRule": { + "methods": [ + "delete_external_access_rule" + ] + }, + "DeleteExternalAddress": { + "methods": [ + "delete_external_address" + ] + }, + "DeleteLoggingServer": { + "methods": [ + "delete_logging_server" + ] + }, + "DeleteManagementDnsZoneBinding": { + "methods": [ + "delete_management_dns_zone_binding" + ] + }, + "DeleteNetworkPeering": { + "methods": [ + "delete_network_peering" + ] + }, "DeleteNetworkPolicy": { "methods": [ "delete_network_policy" @@ -65,21 +115,66 @@ "delete_vmware_engine_network" ] }, + "FetchNetworkPolicyExternalAddresses": { + "methods": [ + "fetch_network_policy_external_addresses" + ] + }, "GetCluster": { "methods": [ "get_cluster" ] }, + "GetDnsBindPermission": { + "methods": [ + "get_dns_bind_permission" + ] + }, + "GetDnsForwarding": { + "methods": [ + "get_dns_forwarding" + ] + }, + "GetExternalAccessRule": { + "methods": [ + "get_external_access_rule" + ] + }, + "GetExternalAddress": { + "methods": [ + "get_external_address" + ] + }, "GetHcxActivationKey": { "methods": [ "get_hcx_activation_key" ] }, + "GetLoggingServer": { + "methods": [ + "get_logging_server" + ] + }, + "GetManagementDnsZoneBinding": { + "methods": [ + "get_management_dns_zone_binding" + ] + }, + "GetNetworkPeering": { + "methods": [ + "get_network_peering" + ] + }, "GetNetworkPolicy": { "methods": [ "get_network_policy" ] }, + "GetNode": { + "methods": [ + "get_node" + ] + }, "GetNodeType": { "methods": [ "get_node_type" @@ -105,16 +200,46 @@ "get_vmware_engine_network" ] }, + "GrantDnsBindPermission": { + "methods": [ + "grant_dns_bind_permission" + ] + }, "ListClusters": { "methods": [ "list_clusters" ] }, + "ListExternalAccessRules": { + "methods": [ + "list_external_access_rules" + ] + }, + "ListExternalAddresses": { + "methods": [ + "list_external_addresses" + ] + }, "ListHcxActivationKeys": { "methods": [ "list_hcx_activation_keys" ] }, + "ListLoggingServers": { + "methods": [ + "list_logging_servers" + ] + }, + 
"ListManagementDnsZoneBindings": { + "methods": [ + "list_management_dns_zone_bindings" + ] + }, + "ListNetworkPeerings": { + "methods": [ + "list_network_peerings" + ] + }, "ListNetworkPolicies": { "methods": [ "list_network_policies" @@ -125,6 +250,16 @@ "list_node_types" ] }, + "ListNodes": { + "methods": [ + "list_nodes" + ] + }, + "ListPeeringRoutes": { + "methods": [ + "list_peering_routes" + ] + }, "ListPrivateClouds": { "methods": [ "list_private_clouds" @@ -150,6 +285,11 @@ "list_vmware_engine_networks" ] }, + "RepairManagementDnsZoneBinding": { + "methods": [ + "repair_management_dns_zone_binding" + ] + }, "ResetNsxCredentials": { "methods": [ "reset_nsx_credentials" @@ -160,6 +300,11 @@ "reset_vcenter_credentials" ] }, + "RevokeDnsBindPermission": { + "methods": [ + "revoke_dns_bind_permission" + ] + }, "ShowNsxCredentials": { "methods": [ "show_nsx_credentials" @@ -180,6 +325,36 @@ "update_cluster" ] }, + "UpdateDnsForwarding": { + "methods": [ + "update_dns_forwarding" + ] + }, + "UpdateExternalAccessRule": { + "methods": [ + "update_external_access_rule" + ] + }, + "UpdateExternalAddress": { + "methods": [ + "update_external_address" + ] + }, + "UpdateLoggingServer": { + "methods": [ + "update_logging_server" + ] + }, + "UpdateManagementDnsZoneBinding": { + "methods": [ + "update_management_dns_zone_binding" + ] + }, + "UpdateNetworkPeering": { + "methods": [ + "update_network_peering" + ] + }, "UpdateNetworkPolicy": { "methods": [ "update_network_policy" @@ -215,11 +390,36 @@ "create_cluster" ] }, + "CreateExternalAccessRule": { + "methods": [ + "create_external_access_rule" + ] + }, + "CreateExternalAddress": { + "methods": [ + "create_external_address" + ] + }, "CreateHcxActivationKey": { "methods": [ "create_hcx_activation_key" ] }, + "CreateLoggingServer": { + "methods": [ + "create_logging_server" + ] + }, + "CreateManagementDnsZoneBinding": { + "methods": [ + "create_management_dns_zone_binding" + ] + }, + "CreateNetworkPeering": { + "methods": 
[ + "create_network_peering" + ] + }, "CreateNetworkPolicy": { "methods": [ "create_network_policy" @@ -245,6 +445,31 @@ "delete_cluster" ] }, + "DeleteExternalAccessRule": { + "methods": [ + "delete_external_access_rule" + ] + }, + "DeleteExternalAddress": { + "methods": [ + "delete_external_address" + ] + }, + "DeleteLoggingServer": { + "methods": [ + "delete_logging_server" + ] + }, + "DeleteManagementDnsZoneBinding": { + "methods": [ + "delete_management_dns_zone_binding" + ] + }, + "DeleteNetworkPeering": { + "methods": [ + "delete_network_peering" + ] + }, "DeleteNetworkPolicy": { "methods": [ "delete_network_policy" @@ -265,21 +490,66 @@ "delete_vmware_engine_network" ] }, + "FetchNetworkPolicyExternalAddresses": { + "methods": [ + "fetch_network_policy_external_addresses" + ] + }, "GetCluster": { "methods": [ "get_cluster" ] }, + "GetDnsBindPermission": { + "methods": [ + "get_dns_bind_permission" + ] + }, + "GetDnsForwarding": { + "methods": [ + "get_dns_forwarding" + ] + }, + "GetExternalAccessRule": { + "methods": [ + "get_external_access_rule" + ] + }, + "GetExternalAddress": { + "methods": [ + "get_external_address" + ] + }, "GetHcxActivationKey": { "methods": [ "get_hcx_activation_key" ] }, + "GetLoggingServer": { + "methods": [ + "get_logging_server" + ] + }, + "GetManagementDnsZoneBinding": { + "methods": [ + "get_management_dns_zone_binding" + ] + }, + "GetNetworkPeering": { + "methods": [ + "get_network_peering" + ] + }, "GetNetworkPolicy": { "methods": [ "get_network_policy" ] }, + "GetNode": { + "methods": [ + "get_node" + ] + }, "GetNodeType": { "methods": [ "get_node_type" @@ -305,16 +575,46 @@ "get_vmware_engine_network" ] }, + "GrantDnsBindPermission": { + "methods": [ + "grant_dns_bind_permission" + ] + }, "ListClusters": { "methods": [ "list_clusters" ] }, + "ListExternalAccessRules": { + "methods": [ + "list_external_access_rules" + ] + }, + "ListExternalAddresses": { + "methods": [ + "list_external_addresses" + ] + }, 
"ListHcxActivationKeys": { "methods": [ "list_hcx_activation_keys" ] }, + "ListLoggingServers": { + "methods": [ + "list_logging_servers" + ] + }, + "ListManagementDnsZoneBindings": { + "methods": [ + "list_management_dns_zone_bindings" + ] + }, + "ListNetworkPeerings": { + "methods": [ + "list_network_peerings" + ] + }, "ListNetworkPolicies": { "methods": [ "list_network_policies" @@ -325,6 +625,16 @@ "list_node_types" ] }, + "ListNodes": { + "methods": [ + "list_nodes" + ] + }, + "ListPeeringRoutes": { + "methods": [ + "list_peering_routes" + ] + }, "ListPrivateClouds": { "methods": [ "list_private_clouds" @@ -350,6 +660,11 @@ "list_vmware_engine_networks" ] }, + "RepairManagementDnsZoneBinding": { + "methods": [ + "repair_management_dns_zone_binding" + ] + }, "ResetNsxCredentials": { "methods": [ "reset_nsx_credentials" @@ -360,6 +675,11 @@ "reset_vcenter_credentials" ] }, + "RevokeDnsBindPermission": { + "methods": [ + "revoke_dns_bind_permission" + ] + }, "ShowNsxCredentials": { "methods": [ "show_nsx_credentials" @@ -380,6 +700,36 @@ "update_cluster" ] }, + "UpdateDnsForwarding": { + "methods": [ + "update_dns_forwarding" + ] + }, + "UpdateExternalAccessRule": { + "methods": [ + "update_external_access_rule" + ] + }, + "UpdateExternalAddress": { + "methods": [ + "update_external_address" + ] + }, + "UpdateLoggingServer": { + "methods": [ + "update_logging_server" + ] + }, + "UpdateManagementDnsZoneBinding": { + "methods": [ + "update_management_dns_zone_binding" + ] + }, + "UpdateNetworkPeering": { + "methods": [ + "update_network_peering" + ] + }, "UpdateNetworkPolicy": { "methods": [ "update_network_policy" @@ -415,11 +765,36 @@ "create_cluster" ] }, + "CreateExternalAccessRule": { + "methods": [ + "create_external_access_rule" + ] + }, + "CreateExternalAddress": { + "methods": [ + "create_external_address" + ] + }, "CreateHcxActivationKey": { "methods": [ "create_hcx_activation_key" ] }, + "CreateLoggingServer": { + "methods": [ + "create_logging_server" + 
] + }, + "CreateManagementDnsZoneBinding": { + "methods": [ + "create_management_dns_zone_binding" + ] + }, + "CreateNetworkPeering": { + "methods": [ + "create_network_peering" + ] + }, "CreateNetworkPolicy": { "methods": [ "create_network_policy" @@ -445,6 +820,31 @@ "delete_cluster" ] }, + "DeleteExternalAccessRule": { + "methods": [ + "delete_external_access_rule" + ] + }, + "DeleteExternalAddress": { + "methods": [ + "delete_external_address" + ] + }, + "DeleteLoggingServer": { + "methods": [ + "delete_logging_server" + ] + }, + "DeleteManagementDnsZoneBinding": { + "methods": [ + "delete_management_dns_zone_binding" + ] + }, + "DeleteNetworkPeering": { + "methods": [ + "delete_network_peering" + ] + }, "DeleteNetworkPolicy": { "methods": [ "delete_network_policy" @@ -465,21 +865,66 @@ "delete_vmware_engine_network" ] }, + "FetchNetworkPolicyExternalAddresses": { + "methods": [ + "fetch_network_policy_external_addresses" + ] + }, "GetCluster": { "methods": [ "get_cluster" ] }, + "GetDnsBindPermission": { + "methods": [ + "get_dns_bind_permission" + ] + }, + "GetDnsForwarding": { + "methods": [ + "get_dns_forwarding" + ] + }, + "GetExternalAccessRule": { + "methods": [ + "get_external_access_rule" + ] + }, + "GetExternalAddress": { + "methods": [ + "get_external_address" + ] + }, "GetHcxActivationKey": { "methods": [ "get_hcx_activation_key" ] }, + "GetLoggingServer": { + "methods": [ + "get_logging_server" + ] + }, + "GetManagementDnsZoneBinding": { + "methods": [ + "get_management_dns_zone_binding" + ] + }, + "GetNetworkPeering": { + "methods": [ + "get_network_peering" + ] + }, "GetNetworkPolicy": { "methods": [ "get_network_policy" ] }, + "GetNode": { + "methods": [ + "get_node" + ] + }, "GetNodeType": { "methods": [ "get_node_type" @@ -505,16 +950,46 @@ "get_vmware_engine_network" ] }, + "GrantDnsBindPermission": { + "methods": [ + "grant_dns_bind_permission" + ] + }, "ListClusters": { "methods": [ "list_clusters" ] }, + "ListExternalAccessRules": { + 
"methods": [ + "list_external_access_rules" + ] + }, + "ListExternalAddresses": { + "methods": [ + "list_external_addresses" + ] + }, "ListHcxActivationKeys": { "methods": [ "list_hcx_activation_keys" ] }, + "ListLoggingServers": { + "methods": [ + "list_logging_servers" + ] + }, + "ListManagementDnsZoneBindings": { + "methods": [ + "list_management_dns_zone_bindings" + ] + }, + "ListNetworkPeerings": { + "methods": [ + "list_network_peerings" + ] + }, "ListNetworkPolicies": { "methods": [ "list_network_policies" @@ -525,6 +1000,16 @@ "list_node_types" ] }, + "ListNodes": { + "methods": [ + "list_nodes" + ] + }, + "ListPeeringRoutes": { + "methods": [ + "list_peering_routes" + ] + }, "ListPrivateClouds": { "methods": [ "list_private_clouds" @@ -550,6 +1035,11 @@ "list_vmware_engine_networks" ] }, + "RepairManagementDnsZoneBinding": { + "methods": [ + "repair_management_dns_zone_binding" + ] + }, "ResetNsxCredentials": { "methods": [ "reset_nsx_credentials" @@ -560,6 +1050,11 @@ "reset_vcenter_credentials" ] }, + "RevokeDnsBindPermission": { + "methods": [ + "revoke_dns_bind_permission" + ] + }, "ShowNsxCredentials": { "methods": [ "show_nsx_credentials" @@ -580,6 +1075,36 @@ "update_cluster" ] }, + "UpdateDnsForwarding": { + "methods": [ + "update_dns_forwarding" + ] + }, + "UpdateExternalAccessRule": { + "methods": [ + "update_external_access_rule" + ] + }, + "UpdateExternalAddress": { + "methods": [ + "update_external_address" + ] + }, + "UpdateLoggingServer": { + "methods": [ + "update_logging_server" + ] + }, + "UpdateManagementDnsZoneBinding": { + "methods": [ + "update_management_dns_zone_binding" + ] + }, + "UpdateNetworkPeering": { + "methods": [ + "update_network_peering" + ] + }, "UpdateNetworkPolicy": { "methods": [ "update_network_policy" diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py index 2968f1104f7a..5f7437f6f8d0 100644 
--- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.2.0" # {x-release-please-version} +__version__ = "1.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py index dd640d368dbe..99619f7a7667 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/async_client.py @@ -70,16 +70,50 @@ class VmwareEngineAsyncClient: cluster_path = staticmethod(VmwareEngineClient.cluster_path) parse_cluster_path = staticmethod(VmwareEngineClient.parse_cluster_path) + dns_bind_permission_path = staticmethod(VmwareEngineClient.dns_bind_permission_path) + parse_dns_bind_permission_path = staticmethod( + VmwareEngineClient.parse_dns_bind_permission_path + ) + dns_forwarding_path = staticmethod(VmwareEngineClient.dns_forwarding_path) + parse_dns_forwarding_path = staticmethod( + VmwareEngineClient.parse_dns_forwarding_path + ) + external_access_rule_path = staticmethod( + VmwareEngineClient.external_access_rule_path + ) + parse_external_access_rule_path = staticmethod( + VmwareEngineClient.parse_external_access_rule_path + ) + external_address_path = staticmethod(VmwareEngineClient.external_address_path) + parse_external_address_path = staticmethod( + VmwareEngineClient.parse_external_address_path + ) hcx_activation_key_path = staticmethod(VmwareEngineClient.hcx_activation_key_path) parse_hcx_activation_key_path = staticmethod( VmwareEngineClient.parse_hcx_activation_key_path ) + 
logging_server_path = staticmethod(VmwareEngineClient.logging_server_path) + parse_logging_server_path = staticmethod( + VmwareEngineClient.parse_logging_server_path + ) + management_dns_zone_binding_path = staticmethod( + VmwareEngineClient.management_dns_zone_binding_path + ) + parse_management_dns_zone_binding_path = staticmethod( + VmwareEngineClient.parse_management_dns_zone_binding_path + ) network_path = staticmethod(VmwareEngineClient.network_path) parse_network_path = staticmethod(VmwareEngineClient.parse_network_path) + network_peering_path = staticmethod(VmwareEngineClient.network_peering_path) + parse_network_peering_path = staticmethod( + VmwareEngineClient.parse_network_peering_path + ) network_policy_path = staticmethod(VmwareEngineClient.network_policy_path) parse_network_policy_path = staticmethod( VmwareEngineClient.parse_network_policy_path ) + node_path = staticmethod(VmwareEngineClient.node_path) + parse_node_path = staticmethod(VmwareEngineClient.parse_node_path) node_type_path = staticmethod(VmwareEngineClient.node_type_path) parse_node_type_path = staticmethod(VmwareEngineClient.parse_node_type_path) private_cloud_path = staticmethod(VmwareEngineClient.private_cloud_path) @@ -434,8 +468,9 @@ async def sample_get_private_cloud(): Returns: google.cloud.vmwareengine_v1.types.PrivateCloud: - Represents a private cloud resource. - Private clouds are zonal resources. + Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -501,10 +536,10 @@ async def create_private_cloud( metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new ``PrivateCloud`` resource in a given project and - location. Private clouds can only be created in zones, regional - private clouds are not supported. - - Creating a private cloud also creates a `management + location. 
Private clouds of type ``STANDARD`` and + ``TIME_LIMITED`` are zonal resources, ``STRETCHED`` private + clouds are regional. Creating a private cloud also creates a + `management cluster `__ for that private cloud. @@ -594,10 +629,9 @@ async def sample_create_private_cloud(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -738,10 +772,9 @@ async def sample_update_private_cloud(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -881,10 +914,9 @@ async def sample_delete_private_cloud(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. 
Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -1007,10 +1039,9 @@ async def sample_undelete_private_cloud(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -1471,9 +1502,8 @@ async def update_cluster( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Modifies a ``Cluster`` resource. Only the following fields can - be updated: ``node_type_configs.*.node_count``. Only fields - specified in ``updateMask`` are applied. + r"""Modifies a ``Cluster`` resource. Only fields specified in + ``updateMask`` are applied. During operation processing, the resource is temporarily in the ``ACTIVE`` state before the operation fully completes. For that @@ -1731,16 +1761,16 @@ async def sample_delete_cluster(): # Done; return the response. return response - async def list_subnets( + async def list_nodes( self, - request: Optional[Union[vmwareengine.ListSubnetsRequest, dict]] = None, + request: Optional[Union[vmwareengine.ListNodesRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSubnetsAsyncPager: - r"""Lists subnets in a given private cloud. 
+ ) -> pagers.ListNodesAsyncPager: + r"""Lists nodes in a given cluster. .. code-block:: python @@ -1753,33 +1783,33 @@ async def list_subnets( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_subnets(): + async def sample_list_nodes(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListSubnetsRequest( + request = vmwareengine_v1.ListNodesRequest( parent="parent_value", ) # Make the request - page_result = client.list_subnets(request=request) + page_result = client.list_nodes(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListSubnetsRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListNodesRequest, dict]]): The request object. Request message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] parent (:class:`str`): - Required. The resource name of the private cloud to be - queried for subnets. Resource names are schemeless URIs - that follow the conventions in + Required. The resource name of the cluster to be queried + for nodes. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/clusters/my-cluster`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1791,9 +1821,9 @@ async def sample_list_subnets(): sent along with the request as metadata. 
Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsAsyncPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodesAsyncPager: Response message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] Iterating over this object will yield results and resolve additional pages automatically. @@ -1809,7 +1839,7 @@ async def sample_list_subnets(): "the individual field arguments should be set." ) - request = vmwareengine.ListSubnetsRequest(request) + request = vmwareengine.ListNodesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1819,7 +1849,7 @@ async def sample_list_subnets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_subnets, + self._client._transport.list_nodes, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -1849,7 +1879,7 @@ async def sample_list_subnets(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListSubnetsAsyncPager( + response = pagers.ListNodesAsyncPager( method=rpc, request=request, response=response, @@ -1859,16 +1889,16 @@ async def sample_list_subnets(): # Done; return the response. return response - async def get_subnet( + async def get_node( self, - request: Optional[Union[vmwareengine.GetSubnetRequest, dict]] = None, + request: Optional[Union[vmwareengine.GetNodeRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Subnet: - r"""Gets details of a single subnet. + ) -> vmwareengine_resources.Node: + r"""Gets details of a single node. .. 
code-block:: python @@ -1881,32 +1911,29 @@ async def get_subnet( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_get_subnet(): + async def sample_get_node(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.GetSubnetRequest( + request = vmwareengine_v1.GetNodeRequest( name="name_value", ) # Make the request - response = await client.get_subnet(request=request) + response = await client.get_node(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.GetSubnetRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetNodeRequest, dict]]): The request object. Request message for - [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] + [VmwareEngine.GetNode][google.cloud.vmwareengine.v1.VmwareEngine.GetNode] name (:class:`str`): - Required. The resource name of the subnet to retrieve. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. For + Required. The resource name of the node to retrieve. For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` + ``projects/{project}/locations/{location}/privateClouds/{private_cloud}/clusters/{cluster}/nodes/{node}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1918,11 +1945,8 @@ async def sample_get_subnet(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.Subnet: - Subnet in a private cloud. Either management subnets (such as vMotion) that - are read-only, or userDefined, which can also be - updated. - + google.cloud.vmwareengine_v1.types.Node: + Node in a cluster. """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -1934,7 +1958,7 @@ async def sample_get_subnet(): "the individual field arguments should be set." ) - request = vmwareengine.GetSubnetRequest(request) + request = vmwareengine.GetNodeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1944,7 +1968,7 @@ async def sample_get_subnet(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_subnet, + self._client._transport.get_node, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -1975,22 +1999,19 @@ async def sample_get_subnet(): # Done; return the response. return response - async def update_subnet( + async def list_external_addresses( self, - request: Optional[Union[vmwareengine.UpdateSubnetRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.ListExternalAddressesRequest, dict] + ] = None, *, - subnet: Optional[vmwareengine_resources.Subnet] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single subnet. Only fields specified - in ``update_mask`` are applied. - - *Note*: This API is synchronous and always returns a successful - ``google.longrunning.Operation`` (LRO). The returned LRO will - only have ``done`` and ``response`` fields. + ) -> pagers.ListExternalAddressesAsyncPager: + r"""Lists external IP addresses assigned to VMware + workload VMs in a given private cloud. .. 
code-block:: python @@ -2003,42 +2024,35 @@ async def update_subnet( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_update_subnet(): + async def sample_list_external_addresses(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.UpdateSubnetRequest( + request = vmwareengine_v1.ListExternalAddressesRequest( + parent="parent_value", ) # Make the request - operation = client.update_subnet(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + page_result = client.list_external_addresses(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateSubnetRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListExternalAddressesRequest, dict]]): The request object. Request message for - [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet] - subnet (:class:`google.cloud.vmwareengine_v1.types.Subnet`): - Required. Subnet description. - This corresponds to the ``subnet`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to specify the fields to be - overwritten in the ``Subnet`` resource by the update. - The fields specified in the ``update_mask`` are relative - to the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then all fields will be overwritten. + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] + parent (:class:`str`): + Required. 
The resource name of the private cloud to be + queried for external IP addresses. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2048,47 +2062,52 @@ async def sample_update_subnet(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAddressesAsyncPager: + Response message for + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.Subnet` Subnet in a private cloud. Either management subnets (such as vMotion) that - are read-only, or userDefined, which can also be - updated. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subnet, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.UpdateSubnetRequest(request) + request = vmwareengine.ListExternalAddressesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if subnet is not None: - request.subnet = subnet - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_subnet, - default_timeout=60.0, + self._client._transport.list_external_addresses, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("subnet.name", request.subnet.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2099,27 +2118,32 @@ async def sample_update_subnet(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - vmwareengine_resources.Subnet, - metadata_type=vmwareengine.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExternalAddressesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
return response - async def list_node_types( + async def fetch_network_policy_external_addresses( self, - request: Optional[Union[vmwareengine.ListNodeTypesRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.FetchNetworkPolicyExternalAddressesRequest, dict] + ] = None, *, - parent: Optional[str] = None, + network_policy: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNodeTypesAsyncPager: - r"""Lists node types + ) -> pagers.FetchNetworkPolicyExternalAddressesAsyncPager: + r"""Lists external IP addresses assigned to VMware + workload VMs within the scope of the given network + policy. .. code-block:: python @@ -2132,34 +2156,35 @@ async def list_node_types( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_node_types(): + async def sample_fetch_network_policy_external_addresses(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListNodeTypesRequest( - parent="parent_value", + request = vmwareengine_v1.FetchNetworkPolicyExternalAddressesRequest( + network_policy="network_policy_value", ) # Make the request - page_result = client.list_node_types(request=request) + page_result = client.fetch_network_policy_external_addresses(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListNodeTypesRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesRequest, dict]]): The request object. Request message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] - parent (:class:`str`): - Required. The resource name of the location to be - queried for node types. 
Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] + network_policy (:class:`str`): + Required. The resource name of the network policy to + query for assigned external IP addresses. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1-a`` + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` - This corresponds to the ``parent`` field + This corresponds to the ``network_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2169,9 +2194,9 @@ async def sample_list_node_types(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesAsyncPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.FetchNetworkPolicyExternalAddressesAsyncPager: Response message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] Iterating over this object will yield results and resolve additional pages automatically. @@ -2180,41 +2205,34 @@ async def sample_list_node_types(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([network_policy]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) - request = vmwareengine.ListNodeTypesRequest(request) + request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if network_policy is not None: + request.network_policy = network_policy # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_node_types, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.fetch_network_policy_external_addresses, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("network_policy", request.network_policy),) + ), ) # Send the request. @@ -2227,7 +2245,7 @@ async def sample_list_node_types(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListNodeTypesAsyncPager( + response = pagers.FetchNetworkPolicyExternalAddressesAsyncPager( method=rpc, request=request, response=response, @@ -2237,16 +2255,16 @@ async def sample_list_node_types(): # Done; return the response. 
return response - async def get_node_type( + async def get_external_address( self, - request: Optional[Union[vmwareengine.GetNodeTypeRequest, dict]] = None, + request: Optional[Union[vmwareengine.GetExternalAddressRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.NodeType: - r"""Gets details of a single ``NodeType``. + ) -> vmwareengine_resources.ExternalAddress: + r"""Gets details of a single external IP address. .. code-block:: python @@ -2259,32 +2277,32 @@ async def get_node_type( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_get_node_type(): + async def sample_get_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.GetNodeTypeRequest( + request = vmwareengine_v1.GetExternalAddressRequest( name="name_value", ) # Make the request - response = await client.get_node_type(request=request) + response = await client.get_external_address(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.GetNodeTypeRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetExternalAddressRequest, dict]]): The request object. Request message for - [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + [VmwareEngine.GetExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAddress] name (:class:`str`): - Required. The resource name of the node type to - retrieve. Resource names are schemeless URIs that follow - the conventions in + Required. The resource name of the external IP address + to retrieve. 
Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-ip`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2296,8 +2314,11 @@ async def sample_get_node_type(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.NodeType: - Describes node type. + google.cloud.vmwareengine_v1.types.ExternalAddress: + Represents an allocated external IP + address and its corresponding internal + IP address in a private cloud. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2309,7 +2330,7 @@ async def sample_get_node_type(): "the individual field arguments should be set." ) - request = vmwareengine.GetNodeTypeRequest(request) + request = vmwareengine.GetExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2319,7 +2340,7 @@ async def sample_get_node_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_node_type, + self._client._transport.get_external_address, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -2350,16 +2371,23 @@ async def sample_get_node_type(): # Done; return the response. 
return response - async def show_nsx_credentials( + async def create_external_address( self, - request: Optional[Union[vmwareengine.ShowNsxCredentialsRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.CreateExternalAddressRequest, dict] + ] = None, *, - private_cloud: Optional[str] = None, + parent: Optional[str] = None, + external_address: Optional[vmwareengine_resources.ExternalAddress] = None, + external_address_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Credentials: - r"""Gets details of credentials for NSX appliance. + ) -> operation_async.AsyncOperation: + r"""Creates a new ``ExternalAddress`` resource in a given private + cloud. The network policy that corresponds to the private cloud + must have the external IP address network service enabled + (``NetworkPolicy.external_ip``). .. code-block:: python @@ -2372,86 +2400,114 @@ async def show_nsx_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_show_nsx_credentials(): + async def sample_create_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ShowNsxCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.CreateExternalAddressRequest( + parent="parent_value", + external_address_id="external_address_id_value", ) # Make the request - response = await client.show_nsx_credentials(request=request) + operation = client.create_external_address(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest, dict]]): + request 
(Optional[Union[google.cloud.vmwareengine_v1.types.CreateExternalAddressRequest, dict]]): The request object. Request message for - [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] - private_cloud (:class:`str`): - Required. The resource name of the private cloud to be - queried for credentials. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.CreateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAddress] + parent (:class:`str`): + Required. The resource name of the private cloud to + create a new external IP address in. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``private_cloud`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + external_address (:class:`google.cloud.vmwareengine_v1.types.ExternalAddress`): + Required. The initial description of + a new external IP address. - Returns: - google.cloud.vmwareengine_v1.types.Credentials: - Credentials for a private cloud. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have + This corresponds to the ``external_address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_address_id (:class:`str`): + Required. The user-provided identifier of the + ``ExternalAddress`` to be created. 
This identifier must + be unique among ``ExternalAddress`` resources within the + parent and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``external_address_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAddress` Represents an allocated external IP address and its corresponding internal IP + address in a private cloud. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([parent, external_address, external_address_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.ShowNsxCredentialsRequest(request) + request = vmwareengine.CreateExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if private_cloud is not None: - request.private_cloud = private_cloud + if parent is not None: + request.parent = parent + if external_address is not None: + request.external_address = external_address + if external_address_id is not None: + request.external_address_id = external_address_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.show_nsx_credentials, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.create_external_address, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2462,21 +2518,36 @@ async def sample_show_nsx_credentials(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ExternalAddress, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. 
return response - async def show_vcenter_credentials( + async def update_external_address( self, request: Optional[ - Union[vmwareengine.ShowVcenterCredentialsRequest, dict] + Union[vmwareengine.UpdateExternalAddressRequest, dict] ] = None, *, - private_cloud: Optional[str] = None, + external_address: Optional[vmwareengine_resources.ExternalAddress] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Credentials: - r"""Gets details of credentials for Vcenter appliance. + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single external IP address. Only + fields specified in ``update_mask`` are applied. + + During operation processing, the resource is temporarily in the + ``ACTIVE`` state before the operation fully completes. For that + period of time, you can't update the resource. Use the operation + status to determine when the processing fully completes. .. 
code-block:: python @@ -2489,34 +2560,45 @@ async def show_vcenter_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_show_vcenter_credentials(): + async def sample_update_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ShowVcenterCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.UpdateExternalAddressRequest( ) # Make the request - response = await client.show_vcenter_credentials(request=request) + operation = client.update_external_address(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateExternalAddressRequest, dict]]): The request object. Request message for - [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] - private_cloud (:class:`str`): - Required. The resource name of the private cloud to be - queried for credentials. Resource names are schemeless - URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + [VmwareEngine.UpdateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAddress] + external_address (:class:`google.cloud.vmwareengine_v1.types.ExternalAddress`): + Required. External IP address + description. - This corresponds to the ``private_cloud`` field + This corresponds to the ``external_address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``ExternalAddress`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2526,40 +2608,37 @@ async def sample_show_vcenter_credentials(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.Credentials: - Credentials for a private cloud. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAddress` Represents an allocated external IP address and its corresponding internal IP + address in a private cloud. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([external_address, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.ShowVcenterCredentialsRequest(request) + request = vmwareengine.UpdateExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if private_cloud is not None: - request.private_cloud = private_cloud + if external_address is not None: + request.external_address = external_address + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.show_vcenter_credentials, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.update_external_address, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2567,7 +2646,7 @@ async def sample_show_vcenter_credentials(): # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) + (("external_address.name", request.external_address.name),) ), ) @@ -2579,19 +2658,32 @@ async def sample_show_vcenter_credentials(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ExternalAddress, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. return response - async def reset_nsx_credentials( + async def delete_external_address( self, - request: Optional[Union[vmwareengine.ResetNsxCredentialsRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.DeleteExternalAddressRequest, dict] + ] = None, *, - private_cloud: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Resets credentials of the NSX appliance. + r"""Deletes a single external IP address. 
When you delete + an external IP address, connectivity between the + external IP address and the corresponding internal IP + address is lost. .. code-block:: python @@ -2604,17 +2696,17 @@ async def reset_nsx_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_reset_nsx_credentials(): + async def sample_delete_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ResetNsxCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.DeleteExternalAddressRequest( + name="name_value", ) # Make the request - operation = client.reset_nsx_credentials(request=request) + operation = client.delete_external_address(request=request) print("Waiting for operation to complete...") @@ -2624,18 +2716,18 @@ async def sample_reset_nsx_credentials(): print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteExternalAddressRequest, dict]]): The request object. Request message for - [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] - private_cloud (:class:`str`): - Required. The resource name of the private cloud to - reset credentials for. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.DeleteExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAddress] + name (:class:`str`): + Required. The resource name of the external IP address + to delete. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-ip`` - This corresponds to the ``private_cloud`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2648,33 +2740,39 @@ async def sample_reset_nsx_credentials(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.ResetNsxCredentialsRequest(request) + request = vmwareengine.DeleteExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if private_cloud is not None: - request.private_cloud = private_cloud + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reset_nsx_credentials, + self._client._transport.delete_external_address, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2682,9 +2780,7 @@ async def sample_reset_nsx_credentials(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -2699,25 +2795,23 @@ async def sample_reset_nsx_credentials(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine_resources.PrivateCloud, + empty_pb2.Empty, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - async def reset_vcenter_credentials( + async def list_subnets( self, - request: Optional[ - Union[vmwareengine.ResetVcenterCredentialsRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.ListSubnetsRequest, dict]] = None, *, - private_cloud: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Resets credentials of the Vcenter appliance. + ) -> pagers.ListSubnetsAsyncPager: + r"""Lists subnets in a given private cloud. .. 
code-block:: python @@ -2730,38 +2824,35 @@ async def reset_vcenter_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_reset_vcenter_credentials(): + async def sample_list_subnets(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ResetVcenterCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.ListSubnetsRequest( + parent="parent_value", ) # Make the request - operation = client.reset_vcenter_credentials(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + page_result = client.list_subnets(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListSubnetsRequest, dict]]): The request object. Request message for - [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] - private_cloud (:class:`str`): - Required. The resource name of the private cloud to - reset credentials for. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + parent (:class:`str`): + Required. The resource name of the private cloud to be + queried for subnets. Resource names are schemeless URIs + that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``private_cloud`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2771,46 +2862,52 @@ async def sample_reset_vcenter_credentials(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsAsyncPager: + Response message for + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.ResetVcenterCredentialsRequest(request) + request = vmwareengine.ListSubnetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_cloud is not None: - request.private_cloud = private_cloud + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reset_vcenter_credentials, - default_timeout=None, + self._client._transport.list_subnets, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2821,32 +2918,28 @@ async def sample_reset_vcenter_credentials(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - vmwareengine_resources.PrivateCloud, - metadata_type=vmwareengine.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubnetsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - async def create_hcx_activation_key( + async def get_subnet( self, - request: Optional[ - Union[vmwareengine.CreateHcxActivationKeyRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetSubnetRequest, dict]] = None, *, - parent: Optional[str] = None, - hcx_activation_key: Optional[vmwareengine_resources.HcxActivationKey] = None, - hcx_activation_key_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new HCX activation key in a given private - cloud. 
+ ) -> vmwareengine_resources.Subnet: + r"""Gets details of a single subnet. .. code-block:: python @@ -2859,67 +2952,164 @@ async def create_hcx_activation_key( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_create_hcx_activation_key(): + async def sample_get_subnet(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.CreateHcxActivationKeyRequest( - parent="parent_value", - hcx_activation_key_id="hcx_activation_key_id_value", + request = vmwareengine_v1.GetSubnetRequest( + name="name_value", ) # Make the request - operation = client.create_hcx_activation_key(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + response = await client.get_subnet(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetSubnetRequest, dict]]): The request object. Request message for - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - parent (:class:`str`): - Required. The resource name of the private cloud to - create the key for. Resource names are schemeless URIs - that follow the conventions in + [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] + name (:class:`str`): + Required. The resource name of the subnet to retrieve. + Resource names are schemeless URIs that follow the + conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` - This corresponds to the ``parent`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - hcx_activation_key (:class:`google.cloud.vmwareengine_v1.types.HcxActivationKey`): - Required. The initial description of - a new HCX activation key. When creating - a new key, this field must be an empty - object. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - This corresponds to the ``hcx_activation_key`` field + Returns: + google.cloud.vmwareengine_v1.types.Subnet: + Subnet in a private cloud. Either management subnets (such as vMotion) that + are read-only, or userDefined, which can also be + updated. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetSubnetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_subnet, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_subnet( + self, + request: Optional[Union[vmwareengine.UpdateSubnetRequest, dict]] = None, + *, + subnet: Optional[vmwareengine_resources.Subnet] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single subnet. Only fields specified + in ``update_mask`` are applied. + + *Note*: This API is synchronous and always returns a successful + ``google.longrunning.Operation`` (LRO). The returned LRO will + only have ``done`` and ``response`` fields. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_update_subnet(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.UpdateSubnetRequest( + ) + + # Make the request + operation = client.update_subnet(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateSubnetRequest, dict]]): + The request object. Request message for + [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet] + subnet (:class:`google.cloud.vmwareengine_v1.types.Subnet`): + Required. Subnet description. + This corresponds to the ``subnet`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - hcx_activation_key_id (:class:`str`): - Required. The user-provided identifier of the - ``HcxActivationKey`` to be created. This identifier must - be unique among ``HcxActivationKey`` resources within - the parent and becomes the final token in the name URI. - The identifier must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and - hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``Subnet`` resource by the update. + The fields specified in the ``update_mask`` are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. 
If the user does not + provide a mask then all fields will be overwritten. - This corresponds to the ``hcx_activation_key_id`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -2932,49 +3122,44 @@ async def sample_create_hcx_activation_key(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.HcxActivationKey` HCX activation key. A default key is created during - private cloud provisioning, but this behavior is - subject to change and you should always verify active - keys. Use - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - to retrieve existing keys and - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - to create new ones. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.Subnet` Subnet in a private cloud. Either management subnets (such as vMotion) that + are read-only, or userDefined, which can also be + updated. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, hcx_activation_key, hcx_activation_key_id]) + has_flattened_params = any([subnet, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.CreateHcxActivationKeyRequest(request) + request = vmwareengine.UpdateSubnetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent - if hcx_activation_key is not None: - request.hcx_activation_key = hcx_activation_key - if hcx_activation_key_id is not None: - request.hcx_activation_key_id = hcx_activation_key_id + if subnet is not None: + request.subnet = subnet + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_hcx_activation_key, - default_timeout=None, + self._client._transport.update_subnet, + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("subnet.name", request.subnet.name),) + ), ) # Send the request. @@ -2989,25 +3174,26 @@ async def sample_create_hcx_activation_key(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine_resources.HcxActivationKey, + vmwareengine_resources.Subnet, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - async def list_hcx_activation_keys( + async def list_external_access_rules( self, request: Optional[ - Union[vmwareengine.ListHcxActivationKeysRequest, dict] + Union[vmwareengine.ListExternalAccessRulesRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListHcxActivationKeysAsyncPager: - r"""Lists ``HcxActivationKey`` resources in a given private cloud. + ) -> pagers.ListExternalAccessRulesAsyncPager: + r"""Lists ``ExternalAccessRule`` resources in the specified network + policy. .. 
code-block:: python @@ -3020,33 +3206,33 @@ async def list_hcx_activation_keys( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_hcx_activation_keys(): + async def sample_list_external_access_rules(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListHcxActivationKeysRequest( + request = vmwareengine_v1.ListExternalAccessRulesRequest( parent="parent_value", ) # Make the request - page_result = client.list_hcx_activation_keys(request=request) + page_result = client.list_external_access_rules(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListExternalAccessRulesRequest, dict]]): The request object. Request message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] parent (:class:`str`): - Required. The resource name of the private cloud to be - queried for HCX activation keys. Resource names are - schemeless URIs that follow the conventions in + Required. The resource name of the network policy to + query for external access firewall rules. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3058,9 +3244,9 @@ async def sample_list_hcx_activation_keys(): sent along with the request as metadata. 
Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysAsyncPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAccessRulesAsyncPager: Response message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] Iterating over this object will yield results and resolve additional pages automatically. @@ -3076,7 +3262,7 @@ async def sample_list_hcx_activation_keys(): "the individual field arguments should be set." ) - request = vmwareengine.ListHcxActivationKeysRequest(request) + request = vmwareengine.ListExternalAccessRulesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3086,7 +3272,7 @@ async def sample_list_hcx_activation_keys(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_hcx_activation_keys, + self._client._transport.list_external_access_rules, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -3116,7 +3302,7 @@ async def sample_list_hcx_activation_keys(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListHcxActivationKeysAsyncPager( + response = pagers.ListExternalAccessRulesAsyncPager( method=rpc, request=request, response=response, @@ -3126,16 +3312,18 @@ async def sample_list_hcx_activation_keys(): # Done; return the response. 
return response - async def get_hcx_activation_key( + async def get_external_access_rule( self, - request: Optional[Union[vmwareengine.GetHcxActivationKeyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.GetExternalAccessRuleRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.HcxActivationKey: - r"""Retrieves a ``HcxActivationKey`` resource by its resource name. + ) -> vmwareengine_resources.ExternalAccessRule: + r"""Gets details of a single external access rule. .. code-block:: python @@ -3148,32 +3336,32 @@ async def get_hcx_activation_key( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_get_hcx_activation_key(): + async def sample_get_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.GetHcxActivationKeyRequest( + request = vmwareengine_v1.GetExternalAccessRuleRequest( name="name_value", ) # Make the request - response = await client.get_hcx_activation_key(request=request) + response = await client.get_external_access_rule(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetExternalAccessRuleRequest, dict]]): The request object. Request message for - [VmwareEngine.GetHcxActivationKeys][] + [VmwareEngine.GetExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAccessRule] name (:class:`str`): - Required. The resource name of the HCX activation key to - retrieve. Resource names are schemeless URIs that follow - the conventions in + Required. 
The resource name of the external access + firewall rule to retrieve. Resource names are schemeless + URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3185,15 +3373,9 @@ async def sample_get_hcx_activation_key(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.HcxActivationKey: - HCX activation key. A default key is created during - private cloud provisioning, but this behavior is - subject to change and you should always verify active - keys. Use - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - to retrieve existing keys and - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - to create new ones. + google.cloud.vmwareengine_v1.types.ExternalAccessRule: + External access firewall rules for filtering incoming traffic destined to + ExternalAddress resources. """ # Create or coerce a protobuf request object. @@ -3206,7 +3388,7 @@ async def sample_get_hcx_activation_key(): "the individual field arguments should be set." ) - request = vmwareengine.GetHcxActivationKeyRequest(request) + request = vmwareengine.GetExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3216,7 +3398,7 @@ async def sample_get_hcx_activation_key(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_hcx_activation_key, + self._client._transport.get_external_access_rule, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -3247,16 +3429,23 @@ async def sample_get_hcx_activation_key(): # Done; return the response. return response - async def get_network_policy( + async def create_external_access_rule( self, - request: Optional[Union[vmwareengine.GetNetworkPolicyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.CreateExternalAccessRuleRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, + external_access_rule: Optional[ + vmwareengine_resources.ExternalAccessRule + ] = None, + external_access_rule_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.NetworkPolicy: - r"""Retrieves a ``NetworkPolicy`` resource by its resource name. + ) -> operation_async.AsyncOperation: + r"""Creates a new external access rule in a given network + policy. .. 
code-block:: python @@ -3269,34 +3458,66 @@ async def get_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_get_network_policy(): + async def sample_create_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.GetNetworkPolicyRequest( - name="name_value", + request = vmwareengine_v1.CreateExternalAccessRuleRequest( + parent="parent_value", + external_access_rule_id="external_access_rule_id_value", ) # Make the request - response = await client.get_network_policy(request=request) + operation = client.create_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateExternalAccessRuleRequest, dict]]): The request object. Request message for - [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] - name (:class:`str`): + [VmwareEngine.CreateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAccessRule] + parent (:class:`str`): Required. The resource name of the network policy to - retrieve. Resource names are schemeless URIs that follow - the conventions in + create a new external access firewall rule in. Resource + names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` - This corresponds to the ``name`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_access_rule (:class:`google.cloud.vmwareengine_v1.types.ExternalAccessRule`): + Required. The initial description of + a new external access rule. + + This corresponds to the ``external_access_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_access_rule_id (:class:`str`): + Required. The user-provided identifier of the + ``ExternalAccessRule`` to be created. This identifier + must be unique among ``ExternalAccessRule`` resources + within the parent and becomes the final token in the + name URI. The identifier must meet the following + requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``external_access_rule_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3306,58 +3527,48 @@ async def sample_get_network_policy(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.NetworkPolicy: - Represents a network policy resource. - Network policies are regional resources. - You can use a network policy to enable - or disable internet access and external - IP access. Network policies are - associated with a VMware Engine network, - which might span across regions. For a - given region, a network policy applies - to all private clouds in the VMware - Engine network associated with the - policy. 
+ google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAccessRule` External access firewall rules for filtering incoming traffic destined to + ExternalAddress resources. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any( + [parent, external_access_rule, external_access_rule_id] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.GetNetworkPolicyRequest(request) + request = vmwareengine.CreateExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if external_access_rule is not None: + request.external_access_rule = external_access_rule + if external_access_rule_id is not None: + request.external_access_rule_id = external_access_rule_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_network_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.create_external_access_rule, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -3368,20 +3579,33 @@ async def sample_get_network_policy(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ExternalAccessRule, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. return response - async def list_network_policies( + async def update_external_access_rule( self, - request: Optional[Union[vmwareengine.ListNetworkPoliciesRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.UpdateExternalAccessRuleRequest, dict] + ] = None, *, - parent: Optional[str] = None, + external_access_rule: Optional[ + vmwareengine_resources.ExternalAccessRule + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNetworkPoliciesAsyncPager: - r"""Lists ``NetworkPolicy`` resources in a specified project and - location. + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single external access rule. Only + fields specified in ``update_mask`` are applied. .. 
code-block:: python @@ -3394,34 +3618,45 @@ async def list_network_policies( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_network_policies(): + async def sample_update_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListNetworkPoliciesRequest( - parent="parent_value", + request = vmwareengine_v1.UpdateExternalAccessRuleRequest( ) # Make the request - page_result = client.list_network_policies(request=request) + operation = client.update_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateExternalAccessRuleRequest, dict]]): The request object. Request message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] - parent (:class:`str`): - Required. The resource name of the location (region) to - query for network policies. Resource names are - schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` + [VmwareEngine.UpdateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAccessRule] + external_access_rule (:class:`google.cloud.vmwareengine_v1.types.ExternalAccessRule`): + Required. Description of the external + access rule. - This corresponds to the ``parent`` field + This corresponds to the ``external_access_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``ExternalAccessRule`` resource by + the update. The fields specified in the ``update_mask`` + are relative to the resource, not the full request. A + field will be overwritten if it is in the mask. If the + user does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3431,52 +3666,46 @@ async def sample_list_network_policies(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesAsyncPager: - Response message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAccessRule` External access firewall rules for filtering incoming traffic destined to + ExternalAddress resources. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([external_access_rule, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) - request = vmwareengine.ListNetworkPoliciesRequest(request) + request = vmwareengine.UpdateExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if external_access_rule is not None: + request.external_access_rule = external_access_rule + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_network_policies, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.update_external_access_rule, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("external_access_rule.name", request.external_access_rule.name),) + ), ) # Send the request. @@ -3487,33 +3716,29 @@ async def sample_list_network_policies(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListNetworkPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ExternalAccessRule, + metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. 
return response - async def create_network_policy( + async def delete_external_access_rule( self, - request: Optional[Union[vmwareengine.CreateNetworkPolicyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.DeleteExternalAccessRuleRequest, dict] + ] = None, *, - parent: Optional[str] = None, - network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, - network_policy_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a new network policy in a given VMware Engine - network of a project and location (region). A new - network policy cannot be created if another network - policy already exists in the same scope. + r"""Deletes a single external access rule. .. code-block:: python @@ -3526,22 +3751,17 @@ async def create_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_create_network_policy(): + async def sample_delete_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - network_policy = vmwareengine_v1.NetworkPolicy() - network_policy.edge_services_cidr = "edge_services_cidr_value" - - request = vmwareengine_v1.CreateNetworkPolicyRequest( - parent="parent_value", - network_policy_id="network_policy_id_value", - network_policy=network_policy, + request = vmwareengine_v1.DeleteExternalAccessRuleRequest( + name="name_value", ) # Make the request - operation = client.create_network_policy(request=request) + operation = client.delete_external_access_rule(request=request) print("Waiting for operation to complete...") @@ -3551,44 +3771,18 @@ async def sample_create_network_policy(): print(response) Args: - request 
(Optional[Union[google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteExternalAccessRuleRequest, dict]]): The request object. Request message for - [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] - parent (:class:`str`): - Required. The resource name of the location (region) to - create the new network policy in. Resource names are - schemeless URIs that follow the conventions in + [VmwareEngine.DeleteExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule] + name (:class:`str`): + Required. The resource name of the external access + firewall rule to delete. Resource names are schemeless + URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - network_policy (:class:`google.cloud.vmwareengine_v1.types.NetworkPolicy`): - Required. The network policy - configuration to use in the request. - - This corresponds to the ``network_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - network_policy_id (:class:`str`): - Required. The user-provided identifier of the network - policy to be created. This identifier must be unique - within parent - ``projects/{my-project}/locations/{us-central1}/networkPolicies`` - and becomes the final token in the name URI. 
The - identifier must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and - hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` - This corresponds to the ``network_policy_id`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3601,41 +3795,39 @@ async def sample_create_network_policy(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional - resources. You can use a network policy to enable or - disable internet access and external IP access. - Network policies are associated with a VMware Engine - network, which might span across regions. For a given - region, a network policy applies to all private - clouds in the VMware Engine network associated with - the policy. + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, network_policy, network_policy_id]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.CreateNetworkPolicyRequest(request) + request = vmwareengine.DeleteExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if network_policy is not None: - request.network_policy = network_policy - if network_policy_id is not None: - request.network_policy_id = network_policy_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_network_policy, + self._client._transport.delete_external_access_rule, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -3643,7 +3835,7 @@ async def sample_create_network_policy(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -3658,36 +3850,24 @@ async def sample_create_network_policy(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine_resources.NetworkPolicy, + empty_pb2.Empty, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. 
return response - async def update_network_policy( + async def list_logging_servers( self, - request: Optional[Union[vmwareengine.UpdateNetworkPolicyRequest, dict]] = None, + request: Optional[Union[vmwareengine.ListLoggingServersRequest, dict]] = None, *, - network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Modifies a ``NetworkPolicy`` resource. Only the following fields - can be updated: ``internet_access``, ``external_ip``, - ``edge_services_cidr``. Only fields specified in ``updateMask`` - are applied. When updating a network policy, the external IP - network service can only be disabled if there are no external IP - addresses present in the scope of the policy. Also, a - ``NetworkService`` cannot be updated when - ``NetworkService.state`` is set to ``RECONCILING``. - - During operation processing, the resource is temporarily in the - ``ACTIVE`` state before the operation fully completes. For that - period of time, you can't update the resource. Use the operation - status to determine when the processing fully completes. + ) -> pagers.ListLoggingServersAsyncPager: + r"""Lists logging servers configured for a given private + cloud. .. 
code-block:: python @@ -3700,47 +3880,35 @@ async def update_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_update_network_policy(): + async def sample_list_logging_servers(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - network_policy = vmwareengine_v1.NetworkPolicy() - network_policy.edge_services_cidr = "edge_services_cidr_value" - - request = vmwareengine_v1.UpdateNetworkPolicyRequest( - network_policy=network_policy, + request = vmwareengine_v1.ListLoggingServersRequest( + parent="parent_value", ) # Make the request - operation = client.update_network_policy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + page_result = client.list_logging_servers(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateNetworkPolicyRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListLoggingServersRequest, dict]]): The request object. Request message for - [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] - network_policy (:class:`google.cloud.vmwareengine_v1.types.NetworkPolicy`): - Required. Network policy description. - This corresponds to the ``network_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to specify the fields to be - overwritten in the ``NetworkPolicy`` resource by the - update. The fields specified in the ``update_mask`` are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. 
If the user - does not provide a mask then all fields will be - overwritten. + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] + parent (:class:`str`): + Required. The resource name of the private cloud to be + queried for logging servers. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3750,52 +3918,52 @@ async def sample_update_network_policy(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListLoggingServersAsyncPager: + Response message for + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional - resources. You can use a network policy to enable or - disable internet access and external IP access. - Network policies are associated with a VMware Engine - network, which might span across regions. For a given - region, a network policy applies to all private - clouds in the VMware Engine network associated with - the policy. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([network_policy, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.UpdateNetworkPolicyRequest(request) + request = vmwareengine.ListLoggingServersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if network_policy is not None: - request.network_policy = network_policy - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_network_policy, - default_timeout=None, + self._client._transport.list_logging_servers, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("network_policy.name", request.network_policy.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -3806,29 +3974,28 @@ async def sample_update_network_policy(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - vmwareengine_resources.NetworkPolicy, - metadata_type=vmwareengine.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListLoggingServersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - async def delete_network_policy( + async def get_logging_server( self, - request: Optional[Union[vmwareengine.DeleteNetworkPolicyRequest, dict]] = None, + request: Optional[Union[vmwareengine.GetLoggingServerRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a ``NetworkPolicy`` resource. A network policy cannot be - deleted when ``NetworkService.state`` is set to ``RECONCILING`` - for either its external IP or internet access service. + ) -> vmwareengine_resources.LoggingServer: + r"""Gets details of a logging server. .. code-block:: python @@ -3841,36 +4008,32 @@ async def delete_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_delete_network_policy(): + async def sample_get_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.DeleteNetworkPolicyRequest( + request = vmwareengine_v1.GetLoggingServerRequest( name="name_value", ) # Make the request - operation = client.delete_network_policy(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + response = await client.get_logging_server(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetLoggingServerRequest, dict]]): The request object. 
Request message for - [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] + [VmwareEngine.GetLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.GetLoggingServer] name (:class:`str`): - Required. The resource name of the network policy to - delete. Resource names are schemeless URIs that follow + Required. The resource name of the Logging Server to + retrieve. Resource names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3882,19 +4045,9 @@ async def sample_delete_network_policy(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } + google.cloud.vmwareengine_v1.types.LoggingServer: + Logging server to receive vCenter or + ESXi logs. """ # Create or coerce a protobuf request object. @@ -3907,7 +4060,7 @@ async def sample_delete_network_policy(): "the individual field arguments should be set." ) - request = vmwareengine.DeleteNetworkPolicyRequest(request) + request = vmwareengine.GetLoggingServerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -3917,8 +4070,17 @@ async def sample_delete_network_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_network_policy, - default_timeout=None, + self._client._transport.get_logging_server, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -3936,34 +4098,22 @@ async def sample_delete_network_policy(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=vmwareengine.OperationMetadata, - ) - # Done; return the response. return response - async def create_vmware_engine_network( + async def create_logging_server( self, - request: Optional[ - Union[vmwareengine.CreateVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.CreateLoggingServerRequest, dict]] = None, *, parent: Optional[str] = None, - vmware_engine_network: Optional[ - vmwareengine_resources.VmwareEngineNetwork - ] = None, - vmware_engine_network_id: Optional[str] = None, + logging_server: Optional[vmwareengine_resources.LoggingServer] = None, + logging_server_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a new VMware Engine network that can be used - by a private cloud. + r"""Create a new logging server for a given private + cloud. .. 
code-block:: python @@ -3976,22 +4126,25 @@ async def create_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_create_vmware_engine_network(): + async def sample_create_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" - request = vmwareengine_v1.CreateVmwareEngineNetworkRequest( + request = vmwareengine_v1.CreateLoggingServerRequest( parent="parent_value", - vmware_engine_network_id="vmware_engine_network_id_value", - vmware_engine_network=vmware_engine_network, + logging_server=logging_server, + logging_server_id="logging_server_id_value", ) # Make the request - operation = client.create_vmware_engine_network(request=request) + operation = client.create_logging_server(request=request) print("Waiting for operation to complete...") @@ -4001,40 +4154,34 @@ async def sample_create_vmware_engine_network(): print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateLoggingServerRequest, dict]]): The request object. Request message for - [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] + [VmwareEngine.CreateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.CreateLoggingServer] parent (:class:`str`): - Required. The resource name of the location to create - the new VMware Engine network in. 
A VMware Engine - network of type ``LEGACY`` is a regional resource, and a - VMware Engine network of type ``STANDARD`` is a global - resource. Resource names are schemeless URIs that follow - the conventions in + Required. The resource name of the private cloud to + create a new Logging Server in. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/global`` + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - vmware_engine_network (:class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork`): + logging_server (:class:`google.cloud.vmwareengine_v1.types.LoggingServer`): Required. The initial description of - the new VMware Engine network. + a new logging server. - This corresponds to the ``vmware_engine_network`` field + This corresponds to the ``logging_server`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - vmware_engine_network_id (:class:`str`): - Required. The user-provided identifier of the new VMware - Engine network. This identifier must be unique among - VMware Engine network resources within the parent and - becomes the final token in the name URI. The identifier - must meet the following requirements: + logging_server_id (:class:`str`): + Required. The user-provided identifier of the + ``LoggingServer`` to be created. This identifier must be + unique among ``LoggingServer`` resources within the + parent and becomes the final token in the name URI. The + identifier must meet the following requirements: - - For networks of type LEGACY, adheres to the format: - ``{region-id}-default``. Replace ``{region-id}`` with - the region where you want to create the VMware Engine - network. For example, "us-central1-default". 
- Only contains 1-63 alphanumeric characters and hyphens - Begins with an alphabetical character @@ -4044,7 +4191,7 @@ async def sample_create_vmware_engine_network(): 1034 `__ (section 3.5) - This corresponds to the ``vmware_engine_network_id`` field + This corresponds to the ``logging_server_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4057,37 +4204,36 @@ async def sample_create_vmware_engine_network(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine - private clouds. + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.LoggingServer` + Logging server to receive vCenter or ESXi logs. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [parent, vmware_engine_network, vmware_engine_network_id] - ) + has_flattened_params = any([parent, logging_server, logging_server_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.CreateVmwareEngineNetworkRequest(request) + request = vmwareengine.CreateLoggingServerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if parent is not None: request.parent = parent - if vmware_engine_network is not None: - request.vmware_engine_network = vmware_engine_network - if vmware_engine_network_id is not None: - request.vmware_engine_network_id = vmware_engine_network_id + if logging_server is not None: + request.logging_server = logging_server + if logging_server_id is not None: + request.logging_server_id = logging_server_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_vmware_engine_network, + self._client._transport.create_logging_server, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4110,30 +4256,25 @@ async def sample_create_vmware_engine_network(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine_resources.VmwareEngineNetwork, + vmwareengine_resources.LoggingServer, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - async def update_vmware_engine_network( + async def update_logging_server( self, - request: Optional[ - Union[vmwareengine.UpdateVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.UpdateLoggingServerRequest, dict]] = None, *, - vmware_engine_network: Optional[ - vmwareengine_resources.VmwareEngineNetwork - ] = None, + logging_server: Optional[vmwareengine_resources.LoggingServer] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Modifies a VMware Engine network resource. Only the following - fields can be updated: ``description``. Only fields specified in - ``updateMask`` are applied. + r"""Updates the parameters of a single logging server. Only fields + specified in ``update_mask`` are applied. .. 
code-block:: python @@ -4146,20 +4287,23 @@ async def update_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_update_vmware_engine_network(): + async def sample_update_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" - - request = vmwareengine_v1.UpdateVmwareEngineNetworkRequest( - vmware_engine_network=vmware_engine_network, + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.UpdateLoggingServerRequest( + logging_server=logging_server, ) # Make the request - operation = client.update_vmware_engine_network(request=request) + operation = client.update_logging_server(request=request) print("Waiting for operation to complete...") @@ -4169,25 +4313,22 @@ async def sample_update_vmware_engine_network(): print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateVmwareEngineNetworkRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateLoggingServerRequest, dict]]): The request object. Request message for - [VmwareEngine.UpdateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.UpdateVmwareEngineNetwork] - vmware_engine_network (:class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork`): - Required. VMware Engine network - description. - - This corresponds to the ``vmware_engine_network`` field + [VmwareEngine.UpdateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.UpdateLoggingServer] + logging_server (:class:`google.cloud.vmwareengine_v1.types.LoggingServer`): + Required. Logging server description. 
+ This corresponds to the ``logging_server`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to specify the fields to be - overwritten in the VMware Engine network resource by the + overwritten in the ``LoggingServer`` resource by the update. The fields specified in the ``update_mask`` are relative to the resource, not the full request. A field will be overwritten if it is in the mask. If the user does not provide a mask then all fields will be - overwritten. Only the following fields can be updated: - ``description``. + overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -4202,33 +4343,34 @@ async def sample_update_vmware_engine_network(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine - private clouds. + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.LoggingServer` + Logging server to receive vCenter or ESXi logs. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([vmware_engine_network, update_mask]) + has_flattened_params = any([logging_server, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.UpdateVmwareEngineNetworkRequest(request) + request = vmwareengine.UpdateLoggingServerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if vmware_engine_network is not None: - request.vmware_engine_network = vmware_engine_network + if logging_server is not None: + request.logging_server = logging_server if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_vmware_engine_network, + self._client._transport.update_logging_server, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4237,7 +4379,7 @@ async def sample_update_vmware_engine_network(): # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("vmware_engine_network.name", request.vmware_engine_network.name),) + (("logging_server.name", request.logging_server.name),) ), ) @@ -4253,28 +4395,23 @@ async def sample_update_vmware_engine_network(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine_resources.VmwareEngineNetwork, + vmwareengine_resources.LoggingServer, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - async def delete_vmware_engine_network( + async def delete_logging_server( self, - request: Optional[ - Union[vmwareengine.DeleteVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.DeleteLoggingServerRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a ``VmwareEngineNetwork`` resource. You can only delete - a VMware Engine network after all resources that refer to it are - deleted. For example, a private cloud, a network peering, and a - network policy can all refer to the same VMware Engine network. + r"""Deletes a single logging server. .. 
code-block:: python @@ -4287,38 +4424,4653 @@ async def delete_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_delete_vmware_engine_network(): + async def sample_delete_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.DeleteVmwareEngineNetworkRequest( + request = vmwareengine_v1.DeleteLoggingServerRequest( name="name_value", ) # Make the request - operation = client.delete_vmware_engine_network(request=request) + operation = client.delete_logging_server(request=request) print("Waiting for operation to complete...") response = (await operation).result() - # Handle the response - print(response) + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteLoggingServerRequest, dict]]): + The request object. Request message for + [VmwareEngine.DeleteLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.DeleteLoggingServer] + name (:class:`str`): + Required. The resource name of the logging server to + delete. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.DeleteLoggingServerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_logging_server, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_node_types( + self, + request: Optional[Union[vmwareengine.ListNodeTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNodeTypesAsyncPager: + r"""Lists node types + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_node_types(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNodeTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_node_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListNodeTypesRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + parent (:class:`str`): + Required. The resource name of the location to be + queried for node types. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1-a`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesAsyncPager: + Response message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListNodeTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_node_types, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListNodeTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_node_type( + self, + request: Optional[Union[vmwareengine.GetNodeTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NodeType: + r"""Gets details of a single ``NodeType``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_node_type(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNodeTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_node_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetNodeTypeRequest, dict]]): + The request object. Request message for + [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + name (:class:`str`): + Required. The resource name of the node type to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.NodeType: + Describes node type. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetNodeTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_node_type, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def show_nsx_credentials( + self, + request: Optional[Union[vmwareengine.ShowNsxCredentialsRequest, dict]] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.Credentials: + r"""Gets details of credentials for NSX appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_show_nsx_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ShowNsxCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + response = await client.show_nsx_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest, dict]]): + The request object. Request message for + [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] + private_cloud (:class:`str`): + Required. The resource name of the private cloud to be + queried for credentials. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.Credentials: + Credentials for a private cloud. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ShowNsxCredentialsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.show_nsx_credentials, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def show_vcenter_credentials( + self, + request: Optional[ + Union[vmwareengine.ShowVcenterCredentialsRequest, dict] + ] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.Credentials: + r"""Gets details of credentials for Vcenter appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_show_vcenter_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ShowVcenterCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + response = await client.show_vcenter_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest, dict]]): + The request object. Request message for + [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] + private_cloud (:class:`str`): + Required. The resource name of the private cloud to be + queried for credentials. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.Credentials: + Credentials for a private cloud. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ShowVcenterCredentialsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.show_vcenter_credentials, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reset_nsx_credentials( + self, + request: Optional[Union[vmwareengine.ResetNsxCredentialsRequest, dict]] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Resets credentials of the NSX appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_reset_nsx_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ResetNsxCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + operation = client.reset_nsx_credentials(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest, dict]]): + The request object. Request message for + [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] + private_cloud (:class:`str`): + Required. The resource name of the private cloud to + reset credentials for. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ResetNsxCredentialsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_nsx_credentials, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.PrivateCloud, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def reset_vcenter_credentials( + self, + request: Optional[ + Union[vmwareengine.ResetVcenterCredentialsRequest, dict] + ] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Resets credentials of the Vcenter appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_reset_vcenter_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ResetVcenterCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + operation = client.reset_vcenter_credentials(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest, dict]]): + The request object. 
Request message for + [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] + private_cloud (:class:`str`): + Required. The resource name of the private cloud to + reset credentials for. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ResetVcenterCredentialsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_vcenter_credentials, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.PrivateCloud, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_dns_forwarding( + self, + request: Optional[Union[vmwareengine.GetDnsForwardingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.DnsForwarding: + r"""Gets details of the ``DnsForwarding`` config. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetDnsForwardingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_forwarding(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetDnsForwardingRequest, dict]]): + The request object. Request message for + [VmwareEngine.GetDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsForwarding] + name (:class:`str`): + Required. The resource name of a ``DnsForwarding`` to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/dnsForwarding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.DnsForwarding: + DNS forwarding config. + This config defines a list of domain to + name server mappings, and is attached to + the private cloud for custom domain + resolution. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetDnsForwardingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dns_forwarding, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_dns_forwarding( + self, + request: Optional[Union[vmwareengine.UpdateDnsForwardingRequest, dict]] = None, + *, + dns_forwarding: Optional[vmwareengine_resources.DnsForwarding] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of the ``DnsForwarding`` config, like + associated domains. Only fields specified in ``update_mask`` are + applied. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_update_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + dns_forwarding = vmwareengine_v1.DnsForwarding() + dns_forwarding.forwarding_rules.domain = "domain_value" + dns_forwarding.forwarding_rules.name_servers = ['name_servers_value1', 'name_servers_value2'] + + request = vmwareengine_v1.UpdateDnsForwardingRequest( + dns_forwarding=dns_forwarding, + ) + + # Make the request + operation = client.update_dns_forwarding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateDnsForwardingRequest, dict]]): + The request object. Request message for + [VmwareEngine.UpdateDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateDnsForwarding] + dns_forwarding (:class:`google.cloud.vmwareengine_v1.types.DnsForwarding`): + Required. DnsForwarding config + details. + + This corresponds to the ``dns_forwarding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``DnsForwarding`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.DnsForwarding` DNS forwarding config. + This config defines a list of domain to name server + mappings, and is attached to the private cloud for + custom domain resolution. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dns_forwarding, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.UpdateDnsForwardingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dns_forwarding is not None: + request.dns_forwarding = dns_forwarding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_dns_forwarding, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dns_forwarding.name", request.dns_forwarding.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.DnsForwarding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_network_peering( + self, + request: Optional[Union[vmwareengine.GetNetworkPeeringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NetworkPeering: + r"""Retrieves a ``NetworkPeering`` resource by its resource name. + The resource contains details of the network peering, such as + peered networks, import and export custom route configurations, + and peering state. NetworkPeering is a global resource and + location can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + response = await client.get_network_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetNetworkPeeringRequest, dict]]): + The request object. 
Request message for + [VmwareEngine.GetNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPeering] + name (:class:`str`): + Required. The resource name of the network peering to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.NetworkPeering: + Details of a network peering. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetNetworkPeeringRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_network_peering, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_network_peerings( + self, + request: Optional[Union[vmwareengine.ListNetworkPeeringsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkPeeringsAsyncPager: + r"""Lists ``NetworkPeering`` resources in a given project. + NetworkPeering is a global resource and location can only be + global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_network_peerings(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNetworkPeeringsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_network_peerings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListNetworkPeeringsRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + parent (:class:`str`): + Required. The resource name of the location (global) to + query for network peerings. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPeeringsAsyncPager: + Response message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListNetworkPeeringsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_network_peerings, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNetworkPeeringsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_network_peering( + self, + request: Optional[Union[vmwareengine.CreateNetworkPeeringRequest, dict]] = None, + *, + parent: Optional[str] = None, + network_peering: Optional[vmwareengine_resources.NetworkPeering] = None, + network_peering_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new network peering between the peer network and + VMware Engine network provided in a ``NetworkPeering`` resource. + NetworkPeering is a global resource and location can only be + global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_create_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.CreateNetworkPeeringRequest( + parent="parent_value", + network_peering_id="network_peering_id_value", + network_peering=network_peering, + ) + + # Make the request + operation = client.create_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request 
(Optional[Union[google.cloud.vmwareengine_v1.types.CreateNetworkPeeringRequest, dict]]): + The request object. Request message for + [VmwareEngine.CreateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPeering] + parent (:class:`str`): + Required. The resource name of the location to create + the new network peering in. This value is always + ``global``, because ``NetworkPeering`` is a global + resource. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_peering (:class:`google.cloud.vmwareengine_v1.types.NetworkPeering`): + Required. The initial description of + the new network peering. + + This corresponds to the ``network_peering`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_peering_id (:class:`str`): + Required. The user-provided identifier of the new + ``NetworkPeering``. This identifier must be unique among + ``NetworkPeering`` resources within the parent and + becomes the final token in the name URI. The identifier + must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 <https://datatracker.ietf.org/doc/html/rfc1034>`__ + (section 3.5) + + This corresponds to the ``network_peering_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.NetworkPeering` + Details of a network peering. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, network_peering, network_peering_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.CreateNetworkPeeringRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if network_peering is not None: + request.network_peering = network_peering + if network_peering_id is not None: + request.network_peering_id = network_peering_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_network_peering, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.NetworkPeering, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_network_peering( + self, + request: Optional[Union[vmwareengine.DeleteNetworkPeeringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``NetworkPeering`` resource. When a network peering is + deleted for a VMware Engine network, the peer network becomes + inaccessible to that VMware Engine network. NetworkPeering is a + global resource and location can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_delete_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteNetworkPeeringRequest, dict]]): + The request object. Request message for + [VmwareEngine.DeleteNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPeering] + name (:class:`str`): + Required. The resource name of the network peering to be + deleted. 
Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.DeleteNetworkPeeringRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_network_peering, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_network_peering( + self, + request: Optional[Union[vmwareengine.UpdateNetworkPeeringRequest, dict]] = None, + *, + network_peering: Optional[vmwareengine_resources.NetworkPeering] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies a ``NetworkPeering`` resource. Only the ``description`` + field can be updated. Only fields specified in ``updateMask`` + are applied. NetworkPeering is a global resource and location + can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_update_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.UpdateNetworkPeeringRequest( + network_peering=network_peering, + ) + + # Make the request + operation = client.update_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateNetworkPeeringRequest, dict]]): + The request object. Request message for + [VmwareEngine.UpdateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPeering] + network_peering (:class:`google.cloud.vmwareengine_v1.types.NetworkPeering`): + Required. Network peering + description. + + This corresponds to the ``network_peering`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``NetworkPeering`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.NetworkPeering` + Details of a network peering. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([network_peering, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.UpdateNetworkPeeringRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if network_peering is not None: + request.network_peering = network_peering + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_network_peering, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("network_peering.name", request.network_peering.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.NetworkPeering, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_peering_routes( + self, + request: Optional[Union[vmwareengine.ListPeeringRoutesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPeeringRoutesAsyncPager: + r"""Lists the network peering routes exchanged over a + peering connection. NetworkPeering is a global resource + and location can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_peering_routes(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListPeeringRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_peering_routes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListPeeringRoutesRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + parent (:class:`str`): + Required. The resource name of the network peering to + retrieve peering routes from. 
Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPeeringRoutesAsyncPager: + Response message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListPeeringRoutesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_peering_routes, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPeeringRoutesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_hcx_activation_key( + self, + request: Optional[ + Union[vmwareengine.CreateHcxActivationKeyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + hcx_activation_key: Optional[vmwareengine_resources.HcxActivationKey] = None, + hcx_activation_key_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new HCX activation key in a given private + cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_create_hcx_activation_key(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.CreateHcxActivationKeyRequest( + parent="parent_value", + hcx_activation_key_id="hcx_activation_key_id_value", + ) + + # Make the request + operation = client.create_hcx_activation_key(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest, dict]]): + The request object. Request message for + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + parent (:class:`str`): + Required. The resource name of the private cloud to + create the key for. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hcx_activation_key (:class:`google.cloud.vmwareengine_v1.types.HcxActivationKey`): + Required. The initial description of + a new HCX activation key. When creating + a new key, this field must be an empty + object. + + This corresponds to the ``hcx_activation_key`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hcx_activation_key_id (:class:`str`): + Required. The user-provided identifier of the + ``HcxActivationKey`` to be created. 
This identifier must + be unique among ``HcxActivationKey`` resources within + the parent and becomes the final token in the name URI. + The identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``hcx_activation_key_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.HcxActivationKey` HCX activation key. A default key is created during + private cloud provisioning, but this behavior is + subject to change and you should always verify active + keys. Use + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + to retrieve existing keys and + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + to create new ones. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, hcx_activation_key, hcx_activation_key_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = vmwareengine.CreateHcxActivationKeyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if hcx_activation_key is not None: + request.hcx_activation_key = hcx_activation_key + if hcx_activation_key_id is not None: + request.hcx_activation_key_id = hcx_activation_key_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_hcx_activation_key, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.HcxActivationKey, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_hcx_activation_keys( + self, + request: Optional[ + Union[vmwareengine.ListHcxActivationKeysRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHcxActivationKeysAsyncPager: + r"""Lists ``HcxActivationKey`` resources in a given private cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_hcx_activation_keys(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListHcxActivationKeysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hcx_activation_keys(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + parent (:class:`str`): + Required. The resource name of the private cloud to be + queried for HCX activation keys. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysAsyncPager: + Response message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListHcxActivationKeysRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_hcx_activation_keys, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListHcxActivationKeysAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_hcx_activation_key( + self, + request: Optional[Union[vmwareengine.GetHcxActivationKeyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.HcxActivationKey: + r"""Retrieves a ``HcxActivationKey`` resource by its resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_hcx_activation_key(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetHcxActivationKeyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_hcx_activation_key(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest, dict]]): + The request object. Request message for + [VmwareEngine.GetHcxActivationKeys][] + name (:class:`str`): + Required. The resource name of the HCX activation key to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.HcxActivationKey: + HCX activation key. A default key is created during + private cloud provisioning, but this behavior is + subject to change and you should always verify active + keys. Use + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + to retrieve existing keys and + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + to create new ones. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetHcxActivationKeyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_hcx_activation_key, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_network_policy( + self, + request: Optional[Union[vmwareengine.GetNetworkPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NetworkPolicy: + r"""Retrieves a ``NetworkPolicy`` resource by its resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNetworkPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_network_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest, dict]]): + The request object. Request message for + [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + name (:class:`str`): + Required. The resource name of the network policy to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.NetworkPolicy: + Represents a network policy resource. + Network policies are regional resources. + You can use a network policy to enable + or disable internet access and external + IP access. Network policies are + associated with a VMware Engine network, + which might span across regions. For a + given region, a network policy applies + to all private clouds in the VMware + Engine network associated with the + policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetNetworkPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_network_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_network_policies( + self, + request: Optional[Union[vmwareengine.ListNetworkPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkPoliciesAsyncPager: + r"""Lists ``NetworkPolicy`` resources in a specified project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_network_policies(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNetworkPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_network_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + parent (:class:`str`): + Required. The resource name of the location (region) to + query for network policies. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesAsyncPager: + Response message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListNetworkPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_network_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNetworkPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_network_policy( + self, + request: Optional[Union[vmwareengine.CreateNetworkPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, + network_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new network policy in a given VMware Engine + network of a project and location (region). A new + network policy cannot be created if another network + policy already exists in the same scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_create_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + network_policy = vmwareengine_v1.NetworkPolicy() + network_policy.edge_services_cidr = "edge_services_cidr_value" + + request = vmwareengine_v1.CreateNetworkPolicyRequest( + parent="parent_value", + network_policy_id="network_policy_id_value", + network_policy=network_policy, + ) + + # Make the request + operation = client.create_network_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest, dict]]): + The request object. 
Request message for + [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] + parent (:class:`str`): + Required. The resource name of the location (region) to + create the new network policy in. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy (:class:`google.cloud.vmwareengine_v1.types.NetworkPolicy`): + Required. The network policy + configuration to use in the request. + + This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy_id (:class:`str`): + Required. The user-provided identifier of the network + policy to be created. This identifier must be unique + within parent + ``projects/{my-project}/locations/{us-central1}/networkPolicies`` + and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``network_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional + resources. You can use a network policy to enable or + disable internet access and external IP access. + Network policies are associated with a VMware Engine + network, which might span across regions. For a given + region, a network policy applies to all private + clouds in the VMware Engine network associated with + the policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, network_policy, network_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.CreateNetworkPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if network_policy is not None: + request.network_policy = network_policy + if network_policy_id is not None: + request.network_policy_id = network_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_network_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.NetworkPolicy, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_network_policy( + self, + request: Optional[Union[vmwareengine.UpdateNetworkPolicyRequest, dict]] = None, + *, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies a ``NetworkPolicy`` resource. Only the following fields + can be updated: ``internet_access``, ``external_ip``, + ``edge_services_cidr``. Only fields specified in ``updateMask`` + are applied. When updating a network policy, the external IP + network service can only be disabled if there are no external IP + addresses present in the scope of the policy. Also, a + ``NetworkService`` cannot be updated when + ``NetworkService.state`` is set to ``RECONCILING``. + + During operation processing, the resource is temporarily in the + ``ACTIVE`` state before the operation fully completes. For that + period of time, you can't update the resource. Use the operation + status to determine when the processing fully completes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_update_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + network_policy = vmwareengine_v1.NetworkPolicy() + network_policy.edge_services_cidr = "edge_services_cidr_value" + + request = vmwareengine_v1.UpdateNetworkPolicyRequest( + network_policy=network_policy, + ) + + # Make the request + operation = client.update_network_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateNetworkPolicyRequest, dict]]): + The request object. Request message for + [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] + network_policy (:class:`google.cloud.vmwareengine_v1.types.NetworkPolicy`): + Required. Network policy description. + This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``NetworkPolicy`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional + resources. You can use a network policy to enable or + disable internet access and external IP access. + Network policies are associated with a VMware Engine + network, which might span across regions. For a given + region, a network policy applies to all private + clouds in the VMware Engine network associated with + the policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([network_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.UpdateNetworkPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if network_policy is not None: + request.network_policy = network_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_network_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("network_policy.name", request.network_policy.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.NetworkPolicy, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_network_policy( + self, + request: Optional[Union[vmwareengine.DeleteNetworkPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``NetworkPolicy`` resource. A network policy cannot be + deleted when ``NetworkService.state`` is set to ``RECONCILING`` + for either its external IP or internet access service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_delete_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteNetworkPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_network_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest, dict]]): + The request object. Request message for + [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] + name (:class:`str`): + Required. The resource name of the network policy to + delete. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.DeleteNetworkPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_network_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_management_dns_zone_bindings( + self, + request: Optional[ + Union[vmwareengine.ListManagementDnsZoneBindingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListManagementDnsZoneBindingsAsyncPager: + r"""Lists Consumer VPCs bound to Management DNS Zone of a + given private cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_management_dns_zone_bindings(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListManagementDnsZoneBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_management_dns_zone_bindings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] + parent (:class:`str`): + Required. The resource name of the private cloud to be + queried for management DNS zone bindings. Resource names + are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListManagementDnsZoneBindingsAsyncPager: + Response message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListManagementDnsZoneBindingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_management_dns_zone_bindings, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListManagementDnsZoneBindingsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.GetManagementDnsZoneBindingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.ManagementDnsZoneBinding: + r"""Retrieves a 'ManagementDnsZoneBinding' resource by + its resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_management_dns_zone_binding(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetManagementDnsZoneBindingRequest, dict]]): + The request object. Request message for + [VmwareEngine.GetManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.GetManagementDnsZoneBinding] + name (:class:`str`): + Required. The resource name of the management DNS zone + binding to retrieve. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding: + Represents a binding between a + network and the management DNS zone. A + management DNS zone is the Cloud DNS + cross-project binding zone that VMware + Engine creates for each private cloud. 
+ It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi + hosts and management VM appliances like + vCenter and NSX Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetManagementDnsZoneBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_management_dns_zone_binding, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.CreateManagementDnsZoneBindingRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + management_dns_zone_binding: Optional[ + vmwareengine_resources.ManagementDnsZoneBinding + ] = None, + management_dns_zone_binding_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new ``ManagementDnsZoneBinding`` resource in a private + cloud. This RPC creates the DNS binding and the resource that + represents the DNS binding of the consumer VPC network to the + management DNS zone. A management DNS zone is the Cloud DNS + cross-project binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP addresses + for the private cloud's ESXi hosts and management VM appliances + like vCenter and NSX Manager. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_create_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.CreateManagementDnsZoneBindingRequest( + parent="parent_value", + management_dns_zone_binding=management_dns_zone_binding, + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + # Make the request + operation = client.create_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateManagementDnsZoneBindingRequest, dict]]): + The request object. Request message for + [VmwareEngine.CreateManagementDnsZoneBindings][] + parent (:class:`str`): + Required. The resource name of the private cloud to + create a new management DNS zone binding for. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management_dns_zone_binding (:class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding`): + Required. The initial values for a + new management DNS zone binding. + + This corresponds to the ``management_dns_zone_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ management_dns_zone_binding_id (:class:`str`): + Required. The user-provided identifier of the + ``ManagementDnsZoneBinding`` resource to be created. + This identifier must be unique among + ``ManagementDnsZoneBinding`` resources within the parent + and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``management_dns_zone_binding_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding` Represents a binding between a network and the management DNS zone. + A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi hosts and + management VM appliances like vCenter and NSX + Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [parent, management_dns_zone_binding, management_dns_zone_binding_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.CreateManagementDnsZoneBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if management_dns_zone_binding is not None: + request.management_dns_zone_binding = management_dns_zone_binding + if management_dns_zone_binding_id is not None: + request.management_dns_zone_binding_id = management_dns_zone_binding_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_management_dns_zone_binding, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ManagementDnsZoneBinding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.UpdateManagementDnsZoneBindingRequest, dict] + ] = None, + *, + management_dns_zone_binding: Optional[ + vmwareengine_resources.ManagementDnsZoneBinding + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a ``ManagementDnsZoneBinding`` resource. Only fields + specified in ``update_mask`` are applied. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_update_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.UpdateManagementDnsZoneBindingRequest( + management_dns_zone_binding=management_dns_zone_binding, + ) + + # Make the request + operation = client.update_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateManagementDnsZoneBindingRequest, dict]]): + The request object. 
Request message for + [VmwareEngine.UpdateManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateManagementDnsZoneBinding] + management_dns_zone_binding (:class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding`): + Required. New values to update the + management DNS zone binding with. + + This corresponds to the ``management_dns_zone_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``ManagementDnsZoneBinding`` resource + by the update. The fields specified in the + ``update_mask`` are relative to the resource, not the + full request. A field will be overwritten if it is in + the mask. If the user does not provide a mask then all + fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding` Represents a binding between a network and the management DNS zone. + A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi hosts and + management VM appliances like vCenter and NSX + Manager. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([management_dns_zone_binding, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.UpdateManagementDnsZoneBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if management_dns_zone_binding is not None: + request.management_dns_zone_binding = management_dns_zone_binding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_management_dns_zone_binding, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "management_dns_zone_binding.name", + request.management_dns_zone_binding.name, + ), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ManagementDnsZoneBinding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.DeleteManagementDnsZoneBindingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``ManagementDnsZoneBinding`` resource. When a + management DNS zone binding is deleted, the corresponding + consumer VPC network is no longer bound to the management DNS + zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_delete_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteManagementDnsZoneBindingRequest, dict]]): + The request object. Request message for + [VmwareEngine.DeleteManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding] + name (:class:`str`): + Required. The resource name of the management DNS zone + binding to delete. 
Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.DeleteManagementDnsZoneBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_management_dns_zone_binding, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def repair_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.RepairManagementDnsZoneBindingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Retries to create a ``ManagementDnsZoneBinding`` resource that + is in failed state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_repair_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.RepairManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.repair_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.RepairManagementDnsZoneBindingRequest, dict]]): + The request object. Request message for + [VmwareEngine.RepairManagementDnsZoneBindings][] + name (:class:`str`): + Required. The resource name of the management DNS zone + binding to repair. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding` Represents a binding between a network and the management DNS zone. 
+ A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi hosts and + management VM appliances like vCenter and NSX + Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.RepairManagementDnsZoneBindingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.repair_management_dns_zone_binding, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.ManagementDnsZoneBinding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.CreateVmwareEngineNetworkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, + vmware_engine_network_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new VMware Engine network that can be used + by a private cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_create_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() + vmware_engine_network.type_ = "STANDARD" + + request = vmwareengine_v1.CreateVmwareEngineNetworkRequest( + parent="parent_value", + vmware_engine_network_id="vmware_engine_network_id_value", + vmware_engine_network=vmware_engine_network, + ) + + # Make the request + operation = client.create_vmware_engine_network(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest, dict]]): + The request object. 
Request message for + [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] + parent (:class:`str`): + Required. The resource name of the location to create + the new VMware Engine network in. A VMware Engine + network of type ``LEGACY`` is a regional resource, and a + VMware Engine network of type ``STANDARD`` is a global + resource. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vmware_engine_network (:class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork`): + Required. The initial description of + the new VMware Engine network. + + This corresponds to the ``vmware_engine_network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vmware_engine_network_id (:class:`str`): + Required. The user-provided identifier of the new VMware + Engine network. This identifier must be unique among + VMware Engine network resources within the parent and + becomes the final token in the name URI. The identifier + must meet the following requirements: + + - For networks of type LEGACY, adheres to the format: + ``{region-id}-default``. Replace ``{region-id}`` with + the region where you want to create the VMware Engine + network. For example, "us-central1-default". + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``vmware_engine_network_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine + private clouds. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, vmware_engine_network, vmware_engine_network_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.CreateVmwareEngineNetworkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if vmware_engine_network is not None: + request.vmware_engine_network = vmware_engine_network + if vmware_engine_network_id is not None: + request.vmware_engine_network_id = vmware_engine_network_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_vmware_engine_network, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.VmwareEngineNetwork, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.UpdateVmwareEngineNetworkRequest, dict] + ] = None, + *, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies a VMware Engine network resource. Only the following + fields can be updated: ``description``. Only fields specified in + ``updateMask`` are applied. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_update_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() + vmware_engine_network.type_ = "STANDARD" + + request = vmwareengine_v1.UpdateVmwareEngineNetworkRequest( + vmware_engine_network=vmware_engine_network, + ) + + # Make the request + operation = client.update_vmware_engine_network(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdateVmwareEngineNetworkRequest, dict]]): + The request object. Request message for + [VmwareEngine.UpdateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.UpdateVmwareEngineNetwork] + vmware_engine_network (:class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork`): + Required. VMware Engine network + description. + + This corresponds to the ``vmware_engine_network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the VMware Engine network resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. Only the following fields can be updated: + ``description``. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine + private clouds. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([vmware_engine_network, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.UpdateVmwareEngineNetworkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vmware_engine_network is not None: + request.vmware_engine_network = vmware_engine_network + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_vmware_engine_network, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("vmware_engine_network.name", request.vmware_engine_network.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.VmwareEngineNetwork, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.DeleteVmwareEngineNetworkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``VmwareEngineNetwork`` resource. You can only delete + a VMware Engine network after all resources that refer to it are + deleted. For example, a private cloud, a network peering, and a + network policy can all refer to the same VMware Engine network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_delete_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteVmwareEngineNetworkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vmware_engine_network(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest, dict]]): + The request object. Request message for + [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] + name (:class:`str`): + Required. The resource name of the VMware Engine network + to be deleted. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.DeleteVmwareEngineNetworkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_vmware_engine_network, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.GetVmwareEngineNetworkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.VmwareEngineNetwork: + r"""Retrieves a ``VmwareEngineNetwork`` resource by its resource + name. The resource contains details of the VMware Engine + network, such as its VMware Engine network type, peered networks + in a service project, and state (for example, ``CREATING``, + ``ACTIVE``, ``DELETING``). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_get_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetVmwareEngineNetworkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vmware_engine_network(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest, dict]]): + The request object. Request message for + [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] + name (:class:`str`): + Required. The resource name of the VMware Engine network + to retrieve. 
Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.VmwareEngineNetwork: + VMware Engine network resource that + provides connectivity for VMware Engine + private clouds. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.GetVmwareEngineNetworkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_vmware_engine_network, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_vmware_engine_networks( + self, + request: Optional[ + Union[vmwareengine.ListVmwareEngineNetworksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVmwareEngineNetworksAsyncPager: + r"""Lists ``VmwareEngineNetwork`` resources in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_list_vmware_engine_networks(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListVmwareEngineNetworksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vmware_engine_networks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest, dict]]): + The request object. Request message for + [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + parent (:class:`str`): + Required. 
The resource name of the location to query for + VMware Engine networks. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksAsyncPager: + Response message for + [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vmwareengine.ListVmwareEngineNetworksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_vmware_engine_networks, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVmwareEngineNetworksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_private_connection( + self, + request: Optional[ + Union[vmwareengine.CreatePrivateConnectionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[vmwareengine_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new private connection that can be used for + accessing private Clouds. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + async def sample_create_private_connection(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + private_connection = vmwareengine_v1.PrivateConnection() + private_connection.vmware_engine_network = "vmware_engine_network_value" + private_connection.type_ = "THIRD_PARTY_SERVICE" + private_connection.service_network = "service_network_value" + + request = vmwareengine_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest, dict]]): + The request object. Request message for + [VmwareEngine.CreatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection] + parent (:class:`str`): + Required. The resource name of the location to create + the new private connection in. Private connection is a + regional resource. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (:class:`google.cloud.vmwareengine_v1.types.PrivateConnection`): + Required. The initial description of + the new private connection. 
+ + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (:class:`str`): + Required. The user-provided identifier of the new + private connection. This identifier must be unique among + private connection resources within the parent and + becomes the final token in the name URI. The identifier + must meet the following requirements: - Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest, dict]]): - The request object. Request message for - [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] - name (:class:`str`): - Required. The resource name of the VMware Engine network - to be deleted. Resource names are schemeless URIs that - follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) - This corresponds to the ``name`` field + This corresponds to the ``private_connection_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4331,39 +9083,35 @@ async def sample_delete_vmware_engine_network(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateConnection` Private connection resource that provides connectivity for VMware Engine + private clouds. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent, private_connection, private_connection_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.DeleteVmwareEngineNetworkRequest(request) + request = vmwareengine.CreatePrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_vmware_engine_network, + self._client._transport.create_private_connection, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4371,7 +9119,7 @@ async def sample_delete_vmware_engine_network(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. 
@@ -4386,29 +9134,25 @@ async def sample_delete_vmware_engine_network(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty_pb2.Empty, + vmwareengine_resources.PrivateConnection, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - async def get_vmware_engine_network( + async def get_private_connection( self, - request: Optional[ - Union[vmwareengine.GetVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetPrivateConnectionRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.VmwareEngineNetwork: - r"""Retrieves a ``VmwareEngineNetwork`` resource by its resource - name. The resource contains details of the VMware Engine - network, such as its VMware Engine network type, peered networks - in a service project, and state (for example, ``CREATING``, - ``ACTIVE``, ``DELETING``). + ) -> vmwareengine_resources.PrivateConnection: + r"""Retrieves a ``PrivateConnection`` resource by its resource name. + The resource contains details of the private connection, such as + connected network, routing mode and state. .. 
code-block:: python @@ -4421,32 +9165,32 @@ async def get_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_get_vmware_engine_network(): + async def sample_get_private_connection(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.GetVmwareEngineNetworkRequest( + request = vmwareengine_v1.GetPrivateConnectionRequest( name="name_value", ) # Make the request - response = await client.get_vmware_engine_network(request=request) + response = await client.get_private_connection(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest, dict]]): The request object. Request message for - [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] + [VmwareEngine.GetPrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection] name (:class:`str`): - Required. The resource name of the VMware Engine network - to retrieve. Resource names are schemeless URIs that - follow the conventions in + Required. The resource name of the private connection to + retrieve. Resource names are schemeless URIs that follow + the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + ``projects/my-project/locations/us-central1/privateConnections/my-connection`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4458,8 +9202,8 @@ async def sample_get_vmware_engine_network(): sent along with the request as metadata. 
Returns: - google.cloud.vmwareengine_v1.types.VmwareEngineNetwork: - VMware Engine network resource that + google.cloud.vmwareengine_v1.types.PrivateConnection: + Private connection resource that provides connectivity for VMware Engine private clouds. @@ -4474,7 +9218,7 @@ async def sample_get_vmware_engine_network(): "the individual field arguments should be set." ) - request = vmwareengine.GetVmwareEngineNetworkRequest(request) + request = vmwareengine.GetPrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4484,7 +9228,7 @@ async def sample_get_vmware_engine_network(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_vmware_engine_network, + self._client._transport.get_private_connection, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -4515,18 +9259,18 @@ async def sample_get_vmware_engine_network(): # Done; return the response. return response - async def list_vmware_engine_networks( + async def list_private_connections( self, request: Optional[ - Union[vmwareengine.ListVmwareEngineNetworksRequest, dict] + Union[vmwareengine.ListPrivateConnectionsRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListVmwareEngineNetworksAsyncPager: - r"""Lists ``VmwareEngineNetwork`` resources in a given project and + ) -> pagers.ListPrivateConnectionsAsyncPager: + r"""Lists ``PrivateConnection`` resources in a given project and location. .. 
code-block:: python @@ -4540,32 +9284,32 @@ async def list_vmware_engine_networks( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_vmware_engine_networks(): + async def sample_list_private_connections(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListVmwareEngineNetworksRequest( + request = vmwareengine_v1.ListPrivateConnectionsRequest( parent="parent_value", ) # Make the request - page_result = client.list_vmware_engine_networks(request=request) + page_result = client.list_private_connections(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest, dict]]): The request object. Request message for - [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] parent (:class:`str`): Required. The resource name of the location to query for - VMware Engine networks. Resource names are schemeless - URIs that follow the conventions in + private connections. Resource names are schemeless URIs + that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/global`` + example: ``projects/my-project/locations/us-central1`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -4577,9 +9321,9 @@ async def sample_list_vmware_engine_networks(): sent along with the request as metadata. 
Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksAsyncPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsAsyncPager: Response message for - [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] Iterating over this object will yield results and resolve additional pages automatically. @@ -4595,7 +9339,7 @@ async def sample_list_vmware_engine_networks(): "the individual field arguments should be set." ) - request = vmwareengine.ListVmwareEngineNetworksRequest(request) + request = vmwareengine.ListPrivateConnectionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4605,7 +9349,7 @@ async def sample_list_vmware_engine_networks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_vmware_engine_networks, + self._client._transport.list_private_connections, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -4635,7 +9379,7 @@ async def sample_list_vmware_engine_networks(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListVmwareEngineNetworksAsyncPager( + response = pagers.ListPrivateConnectionsAsyncPager( method=rpc, request=request, response=response, @@ -4645,21 +9389,21 @@ async def sample_list_vmware_engine_networks(): # Done; return the response. 
return response - async def create_private_connection( + async def update_private_connection( self, request: Optional[ - Union[vmwareengine.CreatePrivateConnectionRequest, dict] + Union[vmwareengine.UpdatePrivateConnectionRequest, dict] ] = None, *, - parent: Optional[str] = None, private_connection: Optional[vmwareengine_resources.PrivateConnection] = None, - private_connection_id: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a new private connection that can be used for - accessing private Clouds. + r"""Modifies a ``PrivateConnection`` resource. Only ``description`` + and ``routing_mode`` fields can be updated. Only fields + specified in ``updateMask`` are applied. .. code-block:: python @@ -4672,7 +9416,7 @@ async def create_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_create_private_connection(): + async def sample_update_private_connection(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() @@ -4682,14 +9426,12 @@ async def sample_create_private_connection(): private_connection.type_ = "THIRD_PARTY_SERVICE" private_connection.service_network = "service_network_value" - request = vmwareengine_v1.CreatePrivateConnectionRequest( - parent="parent_value", - private_connection_id="private_connection_id_value", + request = vmwareengine_v1.UpdatePrivateConnectionRequest( private_connection=private_connection, ) # Make the request - operation = client.create_private_connection(request=request) + operation = client.update_private_connection(request=request) print("Waiting for operation to complete...") @@ -4699,44 +9441,26 @@ async def sample_create_private_connection(): print(response) Args: - request 
(Optional[Union[google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdatePrivateConnectionRequest, dict]]): The request object. Request message for - [VmwareEngine.CreatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection] - parent (:class:`str`): - Required. The resource name of the location to create - the new private connection in. Private connection is a - regional resource. Resource names are schemeless URIs - that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + [VmwareEngine.UpdatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.UpdatePrivateConnection] private_connection (:class:`google.cloud.vmwareengine_v1.types.PrivateConnection`): - Required. The initial description of - the new private connection. + Required. Private connection + description. This corresponds to the ``private_connection`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - private_connection_id (:class:`str`): - Required. The user-provided identifier of the new - private connection. This identifier must be unique among - private connection resources within the parent and - becomes the final token in the name URI. The identifier - must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and - hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the ``PrivateConnection`` resource by the + update. 
The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. - This corresponds to the ``private_connection_id`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -4756,28 +9480,26 @@ async def sample_create_private_connection(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, private_connection, private_connection_id]) + has_flattened_params = any([private_connection, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.CreatePrivateConnectionRequest(request) + request = vmwareengine.UpdatePrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent if private_connection is not None: request.private_connection = private_connection - if private_connection_id is not None: - request.private_connection_id = private_connection_id + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_private_connection, + self._client._transport.update_private_connection, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4785,7 +9507,9 @@ async def sample_create_private_connection(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("private_connection.name", request.private_connection.name),) + ), ) # Send the request. @@ -4807,18 +9531,20 @@ async def sample_create_private_connection(): # Done; return the response. return response - async def get_private_connection( + async def delete_private_connection( self, - request: Optional[Union[vmwareengine.GetPrivateConnectionRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.DeletePrivateConnectionRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.PrivateConnection: - r"""Retrieves a ``PrivateConnection`` resource by its resource name. - The resource contains details of the private connection, such as - connected network, routing mode and state. + ) -> operation_async.AsyncOperation: + r"""Deletes a ``PrivateConnection`` resource. When a private + connection is deleted for a VMware Engine network, the connected + network becomes inaccessible to that VMware Engine network. .. 
code-block:: python @@ -4831,29 +9557,33 @@ async def get_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_get_private_connection(): + async def sample_delete_private_connection(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.GetPrivateConnectionRequest( + request = vmwareengine_v1.DeletePrivateConnectionRequest( name="name_value", ) # Make the request - response = await client.get_private_connection(request=request) + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest, dict]]): The request object. Request message for - [VmwareEngine.GetPrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection] + [VmwareEngine.DeletePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection] name (:class:`str`): Required. The resource name of the private connection to - retrieve. Resource names are schemeless URIs that follow - the conventions in + be deleted. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: ``projects/my-project/locations/us-central1/privateConnections/my-connection`` @@ -4867,11 +9597,20 @@ async def sample_get_private_connection(): metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - Returns: - google.cloud.vmwareengine_v1.types.PrivateConnection: - Private connection resource that - provides connectivity for VMware Engine - private clouds. 
+ Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. @@ -4884,7 +9623,7 @@ async def sample_get_private_connection(): "the individual field arguments should be set." ) - request = vmwareengine.GetPrivateConnectionRequest(request) + request = vmwareengine.DeletePrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -4894,17 +9633,8 @@ async def sample_get_private_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_private_connection, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.delete_private_connection, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4922,22 +9652,30 @@ async def sample_get_private_connection(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. 
return response - async def list_private_connections( + async def list_private_connection_peering_routes( self, request: Optional[ - Union[vmwareengine.ListPrivateConnectionsRequest, dict] + Union[vmwareengine.ListPrivateConnectionPeeringRoutesRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPrivateConnectionsAsyncPager: - r"""Lists ``PrivateConnection`` resources in a given project and - location. + ) -> pagers.ListPrivateConnectionPeeringRoutesAsyncPager: + r"""Lists the private connection routes exchanged over a + peering connection. .. code-block:: python @@ -4950,32 +9688,33 @@ async def list_private_connections( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_private_connections(): + async def sample_list_private_connection_peering_routes(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListPrivateConnectionsRequest( + request = vmwareengine_v1.ListPrivateConnectionPeeringRoutesRequest( parent="parent_value", ) # Make the request - page_result = client.list_private_connections(request=request) + page_result = client.list_private_connection_peering_routes(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest, dict]]): The request object. 
Request message for - [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] + [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] parent (:class:`str`): - Required. The resource name of the location to query for - private connections. Resource names are schemeless URIs - that follow the conventions in + Required. The resource name of the private connection to + retrieve peering routes from. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` + example: + ``projects/my-project/locations/us-west1/privateConnections/my-connection`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -4987,9 +9726,9 @@ async def sample_list_private_connections(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsAsyncPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesAsyncPager: Response message for - [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] + [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] Iterating over this object will yield results and resolve additional pages automatically. @@ -5005,7 +9744,7 @@ async def sample_list_private_connections(): "the individual field arguments should be set." ) - request = vmwareengine.ListPrivateConnectionsRequest(request) + request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -5015,7 +9754,7 @@ async def sample_list_private_connections(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_private_connections, + self._client._transport.list_private_connection_peering_routes, default_retry=retries.AsyncRetry( initial=1.0, maximum=10.0, @@ -5045,7 +9784,7 @@ async def sample_list_private_connections(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListPrivateConnectionsAsyncPager( + response = pagers.ListPrivateConnectionPeeringRoutesAsyncPager( method=rpc, request=request, response=response, @@ -5055,21 +9794,23 @@ async def sample_list_private_connections(): # Done; return the response. return response - async def update_private_connection( + async def grant_dns_bind_permission( self, request: Optional[ - Union[vmwareengine.UpdatePrivateConnectionRequest, dict] + Union[vmwareengine.GrantDnsBindPermissionRequest, dict] ] = None, *, - private_connection: Optional[vmwareengine_resources.PrivateConnection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, + principal: Optional[vmwareengine_resources.Principal] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Modifies a ``PrivateConnection`` resource. Only ``description`` - and ``routing_mode`` fields can be updated. Only fields - specified in ``updateMask`` are applied. + r"""Grants the bind permission to the customer provided + principal(user / service account) to bind their DNS zone + with the intranet VPC associated with the project. + DnsBindPermission is a global resource and location can + only be global. .. 
code-block:: python @@ -5082,22 +9823,21 @@ async def update_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_update_private_connection(): + async def sample_grant_dns_bind_permission(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - private_connection = vmwareengine_v1.PrivateConnection() - private_connection.vmware_engine_network = "vmware_engine_network_value" - private_connection.type_ = "THIRD_PARTY_SERVICE" - private_connection.service_network = "service_network_value" + principal = vmwareengine_v1.Principal() + principal.user = "user_value" - request = vmwareengine_v1.UpdatePrivateConnectionRequest( - private_connection=private_connection, + request = vmwareengine_v1.GrantDnsBindPermissionRequest( + name="name_value", + principal=principal, ) # Make the request - operation = client.update_private_connection(request=request) + operation = client.grant_dns_bind_permission(request=request) print("Waiting for operation to complete...") @@ -5107,26 +9847,30 @@ async def sample_update_private_connection(): print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.UpdatePrivateConnectionRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GrantDnsBindPermissionRequest, dict]]): The request object. Request message for - [VmwareEngine.UpdatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.UpdatePrivateConnection] - private_connection (:class:`google.cloud.vmwareengine_v1.types.PrivateConnection`): - Required. Private connection - description. + [VmwareEngine.GrantDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GrantDnsBindPermission] + name (:class:`str`): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. 
+ DnsBindPermission is a global resource. Resource names + are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/dnsBindPermission`` - This corresponds to the ``private_connection`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to specify the fields to be - overwritten in the ``PrivateConnection`` resource by the - update. The fields specified in the ``update_mask`` are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. - - This corresponds to the ``update_mask`` field + principal (:class:`google.cloud.vmwareengine_v1.types.Principal`): + Required. The consumer provided + user/service account which needs to be + granted permission to bind with the + intranet VPC corresponding to the + consumer project. + + This corresponds to the ``principal`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -5139,33 +9883,34 @@ async def sample_update_private_connection(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateConnection` Private connection resource that provides connectivity for VMware Engine - private clouds. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.DnsBindPermission` DnsBindPermission resource that contains the accounts having the consumer DNS + bind permission on the corresponding intranet VPC of + the consumer project. 
""" # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_connection, update_mask]) + has_flattened_params = any([name, principal]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.UpdatePrivateConnectionRequest(request) + request = vmwareengine.GrantDnsBindPermissionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_connection is not None: - request.private_connection = private_connection - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name + if principal is not None: + request.principal = principal # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_private_connection, + self._client._transport.grant_dns_bind_permission, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -5173,9 +9918,7 @@ async def sample_update_private_connection(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_connection.name", request.private_connection.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -5190,27 +9933,26 @@ async def sample_update_private_connection(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - vmwareengine_resources.PrivateConnection, + vmwareengine_resources.DnsBindPermission, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. 
return response - async def delete_private_connection( + async def get_dns_bind_permission( self, - request: Optional[ - Union[vmwareengine.DeletePrivateConnectionRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetDnsBindPermissionRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a ``PrivateConnection`` resource. When a private - connection is deleted for a VMware Engine network, the connected - network becomes inaccessible to that VMware Engine network. + ) -> vmwareengine_resources.DnsBindPermission: + r"""Gets all the principals having bind permission on the + intranet VPC associated with the consumer project + granted by the Grant API. DnsBindPermission is a global + resource and location can only be global. .. code-block:: python @@ -5223,36 +9965,34 @@ async def delete_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_delete_private_connection(): + async def sample_get_dns_bind_permission(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.DeletePrivateConnectionRequest( + request = vmwareengine_v1.GetDnsBindPermissionRequest( name="name_value", ) # Make the request - operation = client.delete_private_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + response = await client.get_dns_bind_permission(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.GetDnsBindPermissionRequest, dict]]): The request object. 
Request message for - [VmwareEngine.DeletePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection] + [VmwareEngine.GetDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsBindPermission] name (:class:`str`): - Required. The resource name of the private connection to - be deleted. Resource names are schemeless URIs that - follow the conventions in + Required. The name of the resource which stores the + users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateConnections/my-connection`` + ``projects/my-project/locations/global/dnsBindPermission`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -5264,19 +10004,12 @@ async def sample_delete_private_connection(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } + google.cloud.vmwareengine_v1.types.DnsBindPermission: + DnsBindPermission resource that + contains the accounts having the + consumer DNS bind permission on the + corresponding intranet VPC of the + consumer project. """ # Create or coerce a protobuf request object. @@ -5289,7 +10022,7 @@ async def sample_delete_private_connection(): "the individual field arguments should be set." 
) - request = vmwareengine.DeletePrivateConnectionRequest(request) + request = vmwareengine.GetDnsBindPermissionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -5299,8 +10032,17 @@ async def sample_delete_private_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_private_connection, - default_timeout=None, + self._client._transport.get_dns_bind_permission, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -5318,30 +10060,26 @@ async def sample_delete_private_connection(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=vmwareengine.OperationMetadata, - ) - # Done; return the response. return response - async def list_private_connection_peering_routes( + async def revoke_dns_bind_permission( self, request: Optional[ - Union[vmwareengine.ListPrivateConnectionPeeringRoutesRequest, dict] + Union[vmwareengine.RevokeDnsBindPermissionRequest, dict] ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, + principal: Optional[vmwareengine_resources.Principal] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPrivateConnectionPeeringRoutesAsyncPager: - r"""Lists the private connection routes exchanged over a - peering connection. 
+ ) -> operation_async.AsyncOperation: + r"""Revokes the bind permission from the customer + provided principal(user / service account) on the + intranet VPC associated with the consumer project. + DnsBindPermission is a global resource and location can + only be global. .. code-block:: python @@ -5354,35 +10092,54 @@ async def list_private_connection_peering_routes( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - async def sample_list_private_connection_peering_routes(): + async def sample_revoke_dns_bind_permission(): # Create a client client = vmwareengine_v1.VmwareEngineAsyncClient() # Initialize request argument(s) - request = vmwareengine_v1.ListPrivateConnectionPeeringRoutesRequest( - parent="parent_value", + principal = vmwareengine_v1.Principal() + principal.user = "user_value" + + request = vmwareengine_v1.RevokeDnsBindPermissionRequest( + name="name_value", + principal=principal, ) # Make the request - page_result = client.list_private_connection_peering_routes(request=request) + operation = client.revoke_dns_bind_permission(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest, dict]]): + request (Optional[Union[google.cloud.vmwareengine_v1.types.RevokeDnsBindPermissionRequest, dict]]): The request object. Request message for - [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] - parent (:class:`str`): - Required. The resource name of the private connection to - retrieve peering routes from. 
Resource names are - schemeless URIs that follow the conventions in + [VmwareEngine.RevokeDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.RevokeDnsBindPermission] + name (:class:`str`): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-west1/privateConnections/my-connection`` + ``projects/my-project/locations/global/dnsBindPermission`` - This corresponds to the ``parent`` field + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + principal (:class:`google.cloud.vmwareengine_v1.types.Principal`): + Required. The consumer provided + user/service account which needs to be + granted permission to bind with the + intranet VPC corresponding to the + consumer project. + + This corresponds to the ``principal`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -5392,52 +10149,45 @@ async def sample_list_private_connection_peering_routes(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesAsyncPager: - Response message for - [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. 
+ The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.DnsBindPermission` DnsBindPermission resource that contains the accounts having the consumer DNS + bind permission on the corresponding intranet VPC of + the consumer project. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name, principal]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest(request) + request = vmwareengine.RevokeDnsBindPermissionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name + if principal is not None: + request.principal = principal # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_private_connection_peering_routes, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, + self._client._transport.revoke_dns_bind_permission, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. 
@@ -5448,13 +10198,12 @@ async def sample_list_private_connection_peering_routes(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListPrivateConnectionPeeringRoutesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + vmwareengine_resources.DnsBindPermission, + metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py index 3d353bb4673b..06d5b7d7f494 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/client.py @@ -209,6 +209,96 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def dns_bind_permission_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified dns_bind_permission string.""" + return "projects/{project}/locations/{location}/dnsBindPermission".format( + project=project, + location=location, + ) + + @staticmethod + def parse_dns_bind_permission_path(path: str) -> Dict[str, str]: + """Parses a dns_bind_permission path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dnsBindPermission$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def dns_forwarding_path( + project: str, + location: str, + private_cloud: str, + ) -> str: + """Returns a fully-qualified dns_forwarding string.""" + return 
"projects/{project}/locations/{location}/privateClouds/{private_cloud}/dnsForwarding".format( + project=project, + location=location, + private_cloud=private_cloud, + ) + + @staticmethod + def parse_dns_forwarding_path(path: str) -> Dict[str, str]: + """Parses a dns_forwarding path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/privateClouds/(?P.+?)/dnsForwarding$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def external_access_rule_path( + project: str, + location: str, + network_policy: str, + external_access_rule: str, + ) -> str: + """Returns a fully-qualified external_access_rule string.""" + return "projects/{project}/locations/{location}/networkPolicies/{network_policy}/externalAccessRules/{external_access_rule}".format( + project=project, + location=location, + network_policy=network_policy, + external_access_rule=external_access_rule, + ) + + @staticmethod + def parse_external_access_rule_path(path: str) -> Dict[str, str]: + """Parses a external_access_rule path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/networkPolicies/(?P.+?)/externalAccessRules/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def external_address_path( + project: str, + location: str, + private_cloud: str, + external_address: str, + ) -> str: + """Returns a fully-qualified external_address string.""" + return "projects/{project}/locations/{location}/privateClouds/{private_cloud}/externalAddresses/{external_address}".format( + project=project, + location=location, + private_cloud=private_cloud, + external_address=external_address, + ) + + @staticmethod + def parse_external_address_path(path: str) -> Dict[str, str]: + """Parses a external_address path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/privateClouds/(?P.+?)/externalAddresses/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod 
def hcx_activation_key_path( project: str, @@ -233,6 +323,54 @@ def parse_hcx_activation_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def logging_server_path( + project: str, + location: str, + private_cloud: str, + logging_server: str, + ) -> str: + """Returns a fully-qualified logging_server string.""" + return "projects/{project}/locations/{location}/privateClouds/{private_cloud}/loggingServers/{logging_server}".format( + project=project, + location=location, + private_cloud=private_cloud, + logging_server=logging_server, + ) + + @staticmethod + def parse_logging_server_path(path: str) -> Dict[str, str]: + """Parses a logging_server path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/privateClouds/(?P.+?)/loggingServers/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def management_dns_zone_binding_path( + project: str, + location: str, + private_cloud: str, + management_dns_zone_binding: str, + ) -> str: + """Returns a fully-qualified management_dns_zone_binding string.""" + return "projects/{project}/locations/{location}/privateClouds/{private_cloud}/managementDnsZoneBindings/{management_dns_zone_binding}".format( + project=project, + location=location, + private_cloud=private_cloud, + management_dns_zone_binding=management_dns_zone_binding, + ) + + @staticmethod + def parse_management_dns_zone_binding_path(path: str) -> Dict[str, str]: + """Parses a management_dns_zone_binding path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/privateClouds/(?P.+?)/managementDnsZoneBindings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def network_path( project: str, @@ -252,6 +390,28 @@ def parse_network_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def network_peering_path( + project: str, + location: str, + network_peering: str, + ) -> str: + 
"""Returns a fully-qualified network_peering string.""" + return "projects/{project}/locations/{location}/networkPeerings/{network_peering}".format( + project=project, + location=location, + network_peering=network_peering, + ) + + @staticmethod + def parse_network_peering_path(path: str) -> Dict[str, str]: + """Parses a network_peering path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/networkPeerings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def network_policy_path( project: str, @@ -274,6 +434,32 @@ def parse_network_policy_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def node_path( + project: str, + location: str, + private_cloud: str, + cluster: str, + node: str, + ) -> str: + """Returns a fully-qualified node string.""" + return "projects/{project}/locations/{location}/privateClouds/{private_cloud}/clusters/{cluster}/nodes/{node}".format( + project=project, + location=location, + private_cloud=private_cloud, + cluster=cluster, + node=node, + ) + + @staticmethod + def parse_node_path(path: str) -> Dict[str, str]: + """Parses a node path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/privateClouds/(?P.+?)/clusters/(?P.+?)/nodes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def node_type_path( project: str, @@ -809,8 +995,9 @@ def sample_get_private_cloud(): Returns: google.cloud.vmwareengine_v1.types.PrivateCloud: - Represents a private cloud resource. - Private clouds are zonal resources. + Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. 
@@ -867,10 +1054,10 @@ def create_private_cloud( metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new ``PrivateCloud`` resource in a given project and - location. Private clouds can only be created in zones, regional - private clouds are not supported. - - Creating a private cloud also creates a `management + location. Private clouds of type ``STANDARD`` and + ``TIME_LIMITED`` are zonal resources, ``STRETCHED`` private + clouds are regional. Creating a private cloud also creates a + `management cluster `__ for that private cloud. @@ -960,10 +1147,9 @@ def sample_create_private_cloud(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -1104,10 +1290,9 @@ def sample_update_private_cloud(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -1247,10 +1432,9 @@ def sample_delete_private_cloud(): google.api_core.operation.Operation: An object representing a long-running operation. 
- The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -1373,10 +1557,9 @@ def sample_undelete_private_cloud(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. """ # Create or coerce a protobuf request object. @@ -1819,9 +2002,8 @@ def update_cluster( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Modifies a ``Cluster`` resource. Only the following fields can - be updated: ``node_type_configs.*.node_count``. Only fields - specified in ``updateMask`` are applied. + r"""Modifies a ``Cluster`` resource. Only fields specified in + ``updateMask`` are applied. During operation processing, the resource is temporarily in the ``ACTIVE`` state before the operation fully completes. For that @@ -2079,16 +2261,16 @@ def sample_delete_cluster(): # Done; return the response. 
return response - def list_subnets( + def list_nodes( self, - request: Optional[Union[vmwareengine.ListSubnetsRequest, dict]] = None, + request: Optional[Union[vmwareengine.ListNodesRequest, dict]] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSubnetsPager: - r"""Lists subnets in a given private cloud. + ) -> pagers.ListNodesPager: + r"""Lists nodes in a given cluster. .. code-block:: python @@ -2101,33 +2283,33 @@ def list_subnets( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_subnets(): + def sample_list_nodes(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListSubnetsRequest( + request = vmwareengine_v1.ListNodesRequest( parent="parent_value", ) # Make the request - page_result = client.list_subnets(request=request) + page_result = client.list_nodes(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListSubnetsRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListNodesRequest, dict]): The request object. Request message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] parent (str): - Required. The resource name of the private cloud to be - queried for subnets. Resource names are schemeless URIs - that follow the conventions in + Required. The resource name of the cluster to be queried + for nodes. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/clusters/my-cluster`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2139,9 +2321,9 @@ def sample_list_subnets(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodesPager: Response message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] Iterating over this object will yield results and resolve additional pages automatically. @@ -2158,11 +2340,11 @@ def sample_list_subnets(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListSubnetsRequest. + # in a vmwareengine.ListNodesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ListSubnetsRequest): - request = vmwareengine.ListSubnetsRequest(request) + if not isinstance(request, vmwareengine.ListNodesRequest): + request = vmwareengine.ListNodesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -2170,7 +2352,7 @@ def sample_list_subnets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_subnets] + rpc = self._transport._wrapped_methods[self._transport.list_nodes] # Certain fields should be provided within the metadata header; # add these here. @@ -2188,7 +2370,7 @@ def sample_list_subnets(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. 
- response = pagers.ListSubnetsPager( + response = pagers.ListNodesPager( method=rpc, request=request, response=response, @@ -2198,16 +2380,16 @@ def sample_list_subnets(): # Done; return the response. return response - def get_subnet( + def get_node( self, - request: Optional[Union[vmwareengine.GetSubnetRequest, dict]] = None, + request: Optional[Union[vmwareengine.GetNodeRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Subnet: - r"""Gets details of a single subnet. + ) -> vmwareengine_resources.Node: + r"""Gets details of a single node. .. code-block:: python @@ -2220,32 +2402,29 @@ def get_subnet( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_get_subnet(): + def sample_get_node(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.GetSubnetRequest( + request = vmwareengine_v1.GetNodeRequest( name="name_value", ) # Make the request - response = client.get_subnet(request=request) + response = client.get_node(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.GetSubnetRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetNodeRequest, dict]): The request object. Request message for - [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] + [VmwareEngine.GetNode][google.cloud.vmwareengine.v1.VmwareEngine.GetNode] name (str): - Required. The resource name of the subnet to retrieve. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. For + Required. The resource name of the node to retrieve. 
For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` + ``projects/{project}/locations/{location}/privateClouds/{private_cloud}/clusters/{cluster}/nodes/{node}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2257,11 +2436,8 @@ def sample_get_subnet(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.Subnet: - Subnet in a private cloud. Either management subnets (such as vMotion) that - are read-only, or userDefined, which can also be - updated. - + google.cloud.vmwareengine_v1.types.Node: + Node in a cluster. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2274,11 +2450,11 @@ def sample_get_subnet(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.GetSubnetRequest. + # in a vmwareengine.GetNodeRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.GetSubnetRequest): - request = vmwareengine.GetSubnetRequest(request) + if not isinstance(request, vmwareengine.GetNodeRequest): + request = vmwareengine.GetNodeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2286,7 +2462,7 @@ def sample_get_subnet(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_subnet] + rpc = self._transport._wrapped_methods[self._transport.get_node] # Certain fields should be provided within the metadata header; # add these here. @@ -2305,22 +2481,19 @@ def sample_get_subnet(): # Done; return the response. 
return response - def update_subnet( + def list_external_addresses( self, - request: Optional[Union[vmwareengine.UpdateSubnetRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.ListExternalAddressesRequest, dict] + ] = None, *, - subnet: Optional[vmwareengine_resources.Subnet] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates the parameters of a single subnet. Only fields specified - in ``update_mask`` are applied. - - *Note*: This API is synchronous and always returns a successful - ``google.longrunning.Operation`` (LRO). The returned LRO will - only have ``done`` and ``response`` fields. + ) -> pagers.ListExternalAddressesPager: + r"""Lists external IP addresses assigned to VMware + workload VMs in a given private cloud. .. code-block:: python @@ -2333,42 +2506,35 @@ def update_subnet( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_update_subnet(): + def sample_list_external_addresses(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.UpdateSubnetRequest( + request = vmwareengine_v1.ListExternalAddressesRequest( + parent="parent_value", ) # Make the request - operation = client.update_subnet(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + page_result = client.list_external_addresses(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.UpdateSubnetRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListExternalAddressesRequest, dict]): The request object. 
Request message for - [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet] - subnet (google.cloud.vmwareengine_v1.types.Subnet): - Required. Subnet description. - This corresponds to the ``subnet`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the ``Subnet`` resource by the update. - The fields specified in the ``update_mask`` are relative - to the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then all fields will be overwritten. + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] + parent (str): + Required. The resource name of the private cloud to be + queried for external IP addresses. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2378,18 +2544,18 @@ def sample_update_subnet(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAddressesPager: + Response message for + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.Subnet` Subnet in a private cloud. 
Either management subnets (such as vMotion) that - are read-only, or userDefined, which can also be - updated. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subnet, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2397,28 +2563,24 @@ def sample_update_subnet(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.UpdateSubnetRequest. + # in a vmwareengine.ListExternalAddressesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.UpdateSubnetRequest): - request = vmwareengine.UpdateSubnetRequest(request) + if not isinstance(request, vmwareengine.ListExternalAddressesRequest): + request = vmwareengine.ListExternalAddressesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if subnet is not None: - request.subnet = subnet - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_subnet] + rpc = self._transport._wrapped_methods[self._transport.list_external_addresses] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("subnet.name", request.subnet.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. 
@@ -2429,27 +2591,32 @@ def sample_update_subnet(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - vmwareengine_resources.Subnet, - metadata_type=vmwareengine.OperationMetadata, - ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExternalAddressesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) # Done; return the response. return response - def list_node_types( + def fetch_network_policy_external_addresses( self, - request: Optional[Union[vmwareengine.ListNodeTypesRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.FetchNetworkPolicyExternalAddressesRequest, dict] + ] = None, *, - parent: Optional[str] = None, + network_policy: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNodeTypesPager: - r"""Lists node types + ) -> pagers.FetchNetworkPolicyExternalAddressesPager: + r"""Lists external IP addresses assigned to VMware + workload VMs within the scope of the given network + policy. .. 
code-block:: python @@ -2462,34 +2629,35 @@ def list_node_types( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_node_types(): + def sample_fetch_network_policy_external_addresses(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListNodeTypesRequest( - parent="parent_value", + request = vmwareengine_v1.FetchNetworkPolicyExternalAddressesRequest( + network_policy="network_policy_value", ) # Make the request - page_result = client.list_node_types(request=request) + page_result = client.fetch_network_policy_external_addresses(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListNodeTypesRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesRequest, dict]): The request object. Request message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] - parent (str): - Required. The resource name of the location to be - queried for node types. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] + network_policy (str): + Required. The resource name of the network policy to + query for assigned external IP addresses. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1-a`` + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` - This corresponds to the ``parent`` field + This corresponds to the ``network_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2499,9 +2667,9 @@ def sample_list_node_types(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.FetchNetworkPolicyExternalAddressesPager: Response message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] Iterating over this object will yield results and resolve additional pages automatically. @@ -2510,7 +2678,7 @@ def sample_list_node_types(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([network_policy]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2518,24 +2686,30 @@ def sample_list_node_types(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListNodeTypesRequest. + # in a vmwareengine.FetchNetworkPolicyExternalAddressesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ListNodeTypesRequest): - request = vmwareengine.ListNodeTypesRequest(request) + if not isinstance( + request, vmwareengine.FetchNetworkPolicyExternalAddressesRequest + ): + request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent + if network_policy is not None: + request.network_policy = network_policy # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_node_types] + rpc = self._transport._wrapped_methods[ + self._transport.fetch_network_policy_external_addresses + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("network_policy", request.network_policy),) + ), ) # Send the request. @@ -2548,7 +2722,7 @@ def sample_list_node_types(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListNodeTypesPager( + response = pagers.FetchNetworkPolicyExternalAddressesPager( method=rpc, request=request, response=response, @@ -2558,16 +2732,16 @@ def sample_list_node_types(): # Done; return the response. return response - def get_node_type( + def get_external_address( self, - request: Optional[Union[vmwareengine.GetNodeTypeRequest, dict]] = None, + request: Optional[Union[vmwareengine.GetExternalAddressRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.NodeType: - r"""Gets details of a single ``NodeType``. + ) -> vmwareengine_resources.ExternalAddress: + r"""Gets details of a single external IP address. .. 
code-block:: python @@ -2580,32 +2754,32 @@ def get_node_type( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_get_node_type(): + def sample_get_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.GetNodeTypeRequest( + request = vmwareengine_v1.GetExternalAddressRequest( name="name_value", ) # Make the request - response = client.get_node_type(request=request) + response = client.get_external_address(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.GetNodeTypeRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetExternalAddressRequest, dict]): The request object. Request message for - [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + [VmwareEngine.GetExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAddress] name (str): - Required. The resource name of the node type to - retrieve. Resource names are schemeless URIs that follow - the conventions in + Required. The resource name of the external IP address + to retrieve. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-ip`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2617,8 +2791,11 @@ def sample_get_node_type(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.NodeType: - Describes node type. + google.cloud.vmwareengine_v1.types.ExternalAddress: + Represents an allocated external IP + address and its corresponding internal + IP address in a private cloud. 
+ """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2631,11 +2808,11 @@ def sample_get_node_type(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.GetNodeTypeRequest. + # in a vmwareengine.GetExternalAddressRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.GetNodeTypeRequest): - request = vmwareengine.GetNodeTypeRequest(request) + if not isinstance(request, vmwareengine.GetExternalAddressRequest): + request = vmwareengine.GetExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2643,7 +2820,7 @@ def sample_get_node_type(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_node_type] + rpc = self._transport._wrapped_methods[self._transport.get_external_address] # Certain fields should be provided within the metadata header; # add these here. @@ -2662,16 +2839,23 @@ def sample_get_node_type(): # Done; return the response. return response - def show_nsx_credentials( + def create_external_address( self, - request: Optional[Union[vmwareengine.ShowNsxCredentialsRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.CreateExternalAddressRequest, dict] + ] = None, *, - private_cloud: Optional[str] = None, + parent: Optional[str] = None, + external_address: Optional[vmwareengine_resources.ExternalAddress] = None, + external_address_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Credentials: - r"""Gets details of credentials for NSX appliance. 
+ ) -> operation.Operation: + r"""Creates a new ``ExternalAddress`` resource in a given private + cloud. The network policy that corresponds to the private cloud + must have the external IP address network service enabled + (``NetworkPolicy.external_ip``). .. code-block:: python @@ -2684,34 +2868,65 @@ def show_nsx_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_show_nsx_credentials(): + def sample_create_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ShowNsxCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.CreateExternalAddressRequest( + parent="parent_value", + external_address_id="external_address_id_value", ) # Make the request - response = client.show_nsx_credentials(request=request) + operation = client.create_external_address(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.CreateExternalAddressRequest, dict]): The request object. Request message for - [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] - private_cloud (str): - Required. The resource name of the private cloud to be - queried for credentials. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.CreateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAddress] + parent (str): + Required. The resource name of the private cloud to + create a new external IP address in. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For example: ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``private_cloud`` field + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_address (google.cloud.vmwareengine_v1.types.ExternalAddress): + Required. The initial description of + a new external IP address. + + This corresponds to the ``external_address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_address_id (str): + Required. The user-provided identifier of the + ``ExternalAddress`` to be created. This identifier must + be unique among ``ExternalAddress`` resources within the + parent and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``external_address_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2721,13 +2936,17 @@ def sample_show_nsx_credentials(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.Credentials: - Credentials for a private cloud. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAddress` Represents an allocated external IP address and its corresponding internal IP + address in a private cloud. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([private_cloud]) + has_flattened_params = any([parent, external_address, external_address_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2735,26 +2954,28 @@ def sample_show_nsx_credentials(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ShowNsxCredentialsRequest. + # in a vmwareengine.CreateExternalAddressRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ShowNsxCredentialsRequest): - request = vmwareengine.ShowNsxCredentialsRequest(request) + if not isinstance(request, vmwareengine.CreateExternalAddressRequest): + request = vmwareengine.CreateExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_cloud is not None: - request.private_cloud = private_cloud + if parent is not None: + request.parent = parent + if external_address is not None: + request.external_address = external_address + if external_address_id is not None: + request.external_address_id = external_address_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.show_nsx_credentials] + rpc = self._transport._wrapped_methods[self._transport.create_external_address] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2765,21 +2986,36 @@ def sample_show_nsx_credentials(): metadata=metadata, ) + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.ExternalAddress, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. return response - def show_vcenter_credentials( + def update_external_address( self, request: Optional[ - Union[vmwareengine.ShowVcenterCredentialsRequest, dict] + Union[vmwareengine.UpdateExternalAddressRequest, dict] ] = None, *, - private_cloud: Optional[str] = None, + external_address: Optional[vmwareengine_resources.ExternalAddress] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Credentials: - r"""Gets details of credentials for Vcenter appliance. + ) -> operation.Operation: + r"""Updates the parameters of a single external IP address. Only + fields specified in ``update_mask`` are applied. + + During operation processing, the resource is temporarily in the + ``ACTIVE`` state before the operation fully completes. For that + period of time, you can't update the resource. Use the operation + status to determine when the processing fully completes. .. 
code-block:: python @@ -2792,34 +3028,45 @@ def show_vcenter_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_show_vcenter_credentials(): + def sample_update_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ShowVcenterCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.UpdateExternalAddressRequest( ) # Make the request - response = client.show_vcenter_credentials(request=request) + operation = client.update_external_address(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.UpdateExternalAddressRequest, dict]): The request object. Request message for - [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] - private_cloud (str): - Required. The resource name of the private cloud to be - queried for credentials. Resource names are schemeless - URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + [VmwareEngine.UpdateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAddress] + external_address (google.cloud.vmwareengine_v1.types.ExternalAddress): + Required. External IP address + description. - This corresponds to the ``private_cloud`` field + This corresponds to the ``external_address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. 
Field mask is used to specify the fields to be + overwritten in the ``ExternalAddress`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2829,13 +3076,17 @@ def sample_show_vcenter_credentials(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.Credentials: - Credentials for a private cloud. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAddress` Represents an allocated external IP address and its corresponding internal IP + address in a private cloud. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([external_address, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2843,25 +3094,27 @@ def sample_show_vcenter_credentials(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ShowVcenterCredentialsRequest. + # in a vmwareengine.UpdateExternalAddressRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, vmwareengine.ShowVcenterCredentialsRequest): - request = vmwareengine.ShowVcenterCredentialsRequest(request) + if not isinstance(request, vmwareengine.UpdateExternalAddressRequest): + request = vmwareengine.UpdateExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_cloud is not None: - request.private_cloud = private_cloud + if external_address is not None: + request.external_address = external_address + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.show_vcenter_credentials] + rpc = self._transport._wrapped_methods[self._transport.update_external_address] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) + (("external_address.name", request.external_address.name),) ), ) @@ -2873,19 +3126,32 @@ def sample_show_vcenter_credentials(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.ExternalAddress, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. return response - def reset_nsx_credentials( + def delete_external_address( self, - request: Optional[Union[vmwareengine.ResetNsxCredentialsRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.DeleteExternalAddressRequest, dict] + ] = None, *, - private_cloud: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Resets credentials of the NSX appliance. 
+ r"""Deletes a single external IP address. When you delete + an external IP address, connectivity between the + external IP address and the corresponding internal IP + address is lost. .. code-block:: python @@ -2898,17 +3164,17 @@ def reset_nsx_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_reset_nsx_credentials(): + def sample_delete_external_address(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ResetNsxCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.DeleteExternalAddressRequest( + name="name_value", ) # Make the request - operation = client.reset_nsx_credentials(request=request) + operation = client.delete_external_address(request=request) print("Waiting for operation to complete...") @@ -2918,18 +3184,18 @@ def sample_reset_nsx_credentials(): print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.DeleteExternalAddressRequest, dict]): The request object. Request message for - [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] - private_cloud (str): - Required. The resource name of the private cloud to - reset credentials for. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.DeleteExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAddress] + name (str): + Required. The resource name of the external IP address + to delete. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-ip`` - This corresponds to the ``private_cloud`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2942,16 +3208,22 @@ def sample_reset_nsx_credentials(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2959,26 +3231,24 @@ def sample_reset_nsx_credentials(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ResetNsxCredentialsRequest. + # in a vmwareengine.DeleteExternalAddressRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, vmwareengine.ResetNsxCredentialsRequest): - request = vmwareengine.ResetNsxCredentialsRequest(request) + if not isinstance(request, vmwareengine.DeleteExternalAddressRequest): + request = vmwareengine.DeleteExternalAddressRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_cloud is not None: - request.private_cloud = private_cloud + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reset_nsx_credentials] + rpc = self._transport._wrapped_methods[self._transport.delete_external_address] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -2993,25 +3263,23 @@ def sample_reset_nsx_credentials(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine_resources.PrivateCloud, + empty_pb2.Empty, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def reset_vcenter_credentials( + def list_subnets( self, - request: Optional[ - Union[vmwareengine.ResetVcenterCredentialsRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.ListSubnetsRequest, dict]] = None, *, - private_cloud: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Resets credentials of the Vcenter appliance. + ) -> pagers.ListSubnetsPager: + r"""Lists subnets in a given private cloud. .. 
code-block:: python @@ -3024,38 +3292,35 @@ def reset_vcenter_credentials( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_reset_vcenter_credentials(): + def sample_list_subnets(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ResetVcenterCredentialsRequest( - private_cloud="private_cloud_value", + request = vmwareengine_v1.ListSubnetsRequest( + parent="parent_value", ) # Make the request - operation = client.reset_vcenter_credentials(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + page_result = client.list_subnets(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListSubnetsRequest, dict]): The request object. Request message for - [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] - private_cloud (str): - Required. The resource name of the private cloud to - reset credentials for. Resource names are schemeless - URIs that follow the conventions in + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + parent (str): + Required. The resource name of the private cloud to be + queried for subnets. Resource names are schemeless URIs + that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``private_cloud`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3065,19 +3330,18 @@ def sample_reset_vcenter_credentials(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsPager: + Response message for + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] - The result type for the operation will be - :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` - Represents a private cloud resource. Private clouds are - zonal resources. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_cloud]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3085,28 +3349,24 @@ def sample_reset_vcenter_credentials(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ResetVcenterCredentialsRequest. + # in a vmwareengine.ListSubnetsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ResetVcenterCredentialsRequest): - request = vmwareengine.ResetVcenterCredentialsRequest(request) + if not isinstance(request, vmwareengine.ListSubnetsRequest): + request = vmwareengine.ListSubnetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_cloud is not None: - request.private_cloud = private_cloud + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[ - self._transport.reset_vcenter_credentials - ] + rpc = self._transport._wrapped_methods[self._transport.list_subnets] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_cloud", request.private_cloud),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -3117,32 +3377,28 @@ def sample_reset_vcenter_credentials(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - vmwareengine_resources.PrivateCloud, - metadata_type=vmwareengine.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubnetsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - def create_hcx_activation_key( + def get_subnet( self, - request: Optional[ - Union[vmwareengine.CreateHcxActivationKeyRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetSubnetRequest, dict]] = None, *, - parent: Optional[str] = None, - hcx_activation_key: Optional[vmwareengine_resources.HcxActivationKey] = None, - hcx_activation_key_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new HCX activation key in a given private - cloud. + ) -> vmwareengine_resources.Subnet: + r"""Gets details of a single subnet. .. 
code-block:: python @@ -3155,67 +3411,34 @@ def create_hcx_activation_key( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_create_hcx_activation_key(): + def sample_get_subnet(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.CreateHcxActivationKeyRequest( - parent="parent_value", - hcx_activation_key_id="hcx_activation_key_id_value", + request = vmwareengine_v1.GetSubnetRequest( + name="name_value", ) # Make the request - operation = client.create_hcx_activation_key(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + response = client.get_subnet(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetSubnetRequest, dict]): The request object. Request message for - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - parent (str): - Required. The resource name of the private cloud to - create the key for. Resource names are schemeless URIs - that follow the conventions in + [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] + name (str): + Required. The resource name of the subnet to retrieve. + Resource names are schemeless URIs that follow the + conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - hcx_activation_key (google.cloud.vmwareengine_v1.types.HcxActivationKey): - Required. The initial description of - a new HCX activation key. When creating - a new key, this field must be an empty - object. 
- - This corresponds to the ``hcx_activation_key`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - hcx_activation_key_id (str): - Required. The user-provided identifier of the - ``HcxActivationKey`` to be created. This identifier must - be unique among ``HcxActivationKey`` resources within - the parent and becomes the final token in the name URI. - The identifier must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and - hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` - This corresponds to the ``hcx_activation_key_id`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3225,23 +3448,16 @@ def sample_create_hcx_activation_key(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.HcxActivationKey` HCX activation key. A default key is created during - private cloud provisioning, but this behavior is - subject to change and you should always verify active - keys. Use - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - to retrieve existing keys and - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - to create new ones. + google.cloud.vmwareengine_v1.types.Subnet: + Subnet in a private cloud. Either management subnets (such as vMotion) that + are read-only, or userDefined, which can also be + updated. """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, hcx_activation_key, hcx_activation_key_id]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3249,30 +3465,24 @@ def sample_create_hcx_activation_key(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.CreateHcxActivationKeyRequest. + # in a vmwareengine.GetSubnetRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.CreateHcxActivationKeyRequest): - request = vmwareengine.CreateHcxActivationKeyRequest(request) + if not isinstance(request, vmwareengine.GetSubnetRequest): + request = vmwareengine.GetSubnetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if hcx_activation_key is not None: - request.hcx_activation_key = hcx_activation_key - if hcx_activation_key_id is not None: - request.hcx_activation_key_id = hcx_activation_key_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_hcx_activation_key - ] + rpc = self._transport._wrapped_methods[self._transport.get_subnet] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -3283,29 +3493,25 @@ def sample_create_hcx_activation_key(): metadata=metadata, ) - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - vmwareengine_resources.HcxActivationKey, - metadata_type=vmwareengine.OperationMetadata, - ) - # Done; return the response. return response - def list_hcx_activation_keys( + def update_subnet( self, - request: Optional[ - Union[vmwareengine.ListHcxActivationKeysRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.UpdateSubnetRequest, dict]] = None, *, - parent: Optional[str] = None, + subnet: Optional[vmwareengine_resources.Subnet] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListHcxActivationKeysPager: - r"""Lists ``HcxActivationKey`` resources in a given private cloud. + ) -> operation.Operation: + r"""Updates the parameters of a single subnet. Only fields specified + in ``update_mask`` are applied. + + *Note*: This API is synchronous and always returns a successful + ``google.longrunning.Operation`` (LRO). The returned LRO will + only have ``done`` and ``response`` fields. .. 
code-block:: python @@ -3318,56 +3524,63 @@ def list_hcx_activation_keys( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_hcx_activation_keys(): + def sample_update_subnet(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListHcxActivationKeysRequest( - parent="parent_value", + request = vmwareengine_v1.UpdateSubnetRequest( ) # Make the request - page_result = client.list_hcx_activation_keys(request=request) + operation = client.update_subnet(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.UpdateSubnetRequest, dict]): The request object. Request message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - parent (str): - Required. The resource name of the private cloud to be - queried for HCX activation keys. Resource names are - schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` - - This corresponds to the ``parent`` field + [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet] + subnet (google.cloud.vmwareengine_v1.types.Subnet): + Required. Subnet description. + This corresponds to the ``subnet`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysPager: - Response message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``Subnet`` resource by the update. + The fields specified in the ``update_mask`` are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. - Iterating over this object will yield results and - resolve additional pages automatically. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.Subnet` Subnet in a private cloud. Either management subnets (such as vMotion) that + are read-only, or userDefined, which can also be + updated. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any([subnet, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3375,24 +3588,28 @@ def sample_list_hcx_activation_keys(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListHcxActivationKeysRequest. + # in a vmwareengine.UpdateSubnetRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ListHcxActivationKeysRequest): - request = vmwareengine.ListHcxActivationKeysRequest(request) + if not isinstance(request, vmwareengine.UpdateSubnetRequest): + request = vmwareengine.UpdateSubnetRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if subnet is not None: + request.subnet = subnet + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_hcx_activation_keys] + rpc = self._transport._wrapped_methods[self._transport.update_subnet] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("subnet.name", request.subnet.name),) + ), ) # Send the request. @@ -3403,28 +3620,30 @@ def sample_list_hcx_activation_keys(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListHcxActivationKeysPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.Subnet, + metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def get_hcx_activation_key( + def list_external_access_rules( self, - request: Optional[Union[vmwareengine.GetHcxActivationKeyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.ListExternalAccessRulesRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.HcxActivationKey: - r"""Retrieves a ``HcxActivationKey`` resource by its resource name. + ) -> pagers.ListExternalAccessRulesPager: + r"""Lists ``ExternalAccessRule`` resources in the specified network + policy. .. code-block:: python @@ -3437,34 +3656,35 @@ def get_hcx_activation_key( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_get_hcx_activation_key(): + def sample_list_external_access_rules(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.GetHcxActivationKeyRequest( - name="name_value", + request = vmwareengine_v1.ListExternalAccessRulesRequest( + parent="parent_value", ) # Make the request - response = client.get_hcx_activation_key(request=request) + page_result = client.list_external_access_rules(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListExternalAccessRulesRequest, dict]): The request object. Request message for - [VmwareEngine.GetHcxActivationKeys][] - name (str): - Required. 
The resource name of the HCX activation key to - retrieve. Resource names are schemeless URIs that follow - the conventions in + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] + parent (str): + Required. The resource name of the network policy to + query for external access firewall rules. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3474,21 +3694,18 @@ def sample_get_hcx_activation_key(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.HcxActivationKey: - HCX activation key. A default key is created during - private cloud provisioning, but this behavior is - subject to change and you should always verify active - keys. Use - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - to retrieve existing keys and - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - to create new ones. + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAccessRulesPager: + Response message for + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3496,24 +3713,26 @@ def sample_get_hcx_activation_key(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.GetHcxActivationKeyRequest. + # in a vmwareengine.ListExternalAccessRulesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.GetHcxActivationKeyRequest): - request = vmwareengine.GetHcxActivationKeyRequest(request) + if not isinstance(request, vmwareengine.ListExternalAccessRulesRequest): + request = vmwareengine.ListExternalAccessRulesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_hcx_activation_key] + rpc = self._transport._wrapped_methods[ + self._transport.list_external_access_rules + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -3524,19 +3743,30 @@ def sample_get_hcx_activation_key(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExternalAccessRulesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + # Done; return the response. 
return response - def get_network_policy( + def get_external_access_rule( self, - request: Optional[Union[vmwareengine.GetNetworkPolicyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.GetExternalAccessRuleRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.NetworkPolicy: - r"""Retrieves a ``NetworkPolicy`` resource by its resource name. + ) -> vmwareengine_resources.ExternalAccessRule: + r"""Gets details of a single external access rule. .. code-block:: python @@ -3549,32 +3779,32 @@ def get_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_get_network_policy(): + def sample_get_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.GetNetworkPolicyRequest( + request = vmwareengine_v1.GetExternalAccessRuleRequest( name="name_value", ) # Make the request - response = client.get_network_policy(request=request) + response = client.get_external_access_rule(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetExternalAccessRuleRequest, dict]): The request object. Request message for - [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + [VmwareEngine.GetExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAccessRule] name (str): - Required. The resource name of the network policy to - retrieve. Resource names are schemeless URIs that follow - the conventions in + Required. The resource name of the external access + firewall rule to retrieve. 
Resource names are schemeless + URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3586,18 +3816,9 @@ def sample_get_network_policy(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.NetworkPolicy: - Represents a network policy resource. - Network policies are regional resources. - You can use a network policy to enable - or disable internet access and external - IP access. Network policies are - associated with a VMware Engine network, - which might span across regions. For a - given region, a network policy applies - to all private clouds in the VMware - Engine network associated with the - policy. + google.cloud.vmwareengine_v1.types.ExternalAccessRule: + External access firewall rules for filtering incoming traffic destined to + ExternalAddress resources. """ # Create or coerce a protobuf request object. @@ -3611,11 +3832,11 @@ def sample_get_network_policy(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.GetNetworkPolicyRequest. + # in a vmwareengine.GetExternalAccessRuleRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.GetNetworkPolicyRequest): - request = vmwareengine.GetNetworkPolicyRequest(request) + if not isinstance(request, vmwareengine.GetExternalAccessRuleRequest): + request = vmwareengine.GetExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if name is not None: @@ -3623,7 +3844,7 @@ def sample_get_network_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_network_policy] + rpc = self._transport._wrapped_methods[self._transport.get_external_access_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -3642,17 +3863,23 @@ def sample_get_network_policy(): # Done; return the response. return response - def list_network_policies( + def create_external_access_rule( self, - request: Optional[Union[vmwareengine.ListNetworkPoliciesRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.CreateExternalAccessRuleRequest, dict] + ] = None, *, parent: Optional[str] = None, + external_access_rule: Optional[ + vmwareengine_resources.ExternalAccessRule + ] = None, + external_access_rule_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNetworkPoliciesPager: - r"""Lists ``NetworkPolicy`` resources in a specified project and - location. + ) -> operation.Operation: + r"""Creates a new external access rule in a given network + policy. .. 
code-block:: python @@ -3665,36 +3892,68 @@ def list_network_policies( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_network_policies(): + def sample_create_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListNetworkPoliciesRequest( + request = vmwareengine_v1.CreateExternalAccessRuleRequest( parent="parent_value", + external_access_rule_id="external_access_rule_id_value", ) # Make the request - page_result = client.list_network_policies(request=request) + operation = client.create_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.CreateExternalAccessRuleRequest, dict]): The request object. Request message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + [VmwareEngine.CreateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAccessRule] parent (str): - Required. The resource name of the location (region) to - query for network policies. Resource names are - schemeless URIs that follow the conventions in + Required. The resource name of the network policy to + create a new external access firewall rule in. Resource + names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
+ external_access_rule (google.cloud.vmwareengine_v1.types.ExternalAccessRule): + Required. The initial description of + a new external access rule. + + This corresponds to the ``external_access_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_access_rule_id (str): + Required. The user-provided identifier of the + ``ExternalAccessRule`` to be created. This identifier + must be unique among ``ExternalAccessRule`` resources + within the parent and becomes the final token in the + name URI. The identifier must meet the following + requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``external_access_rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3702,18 +3961,19 @@ def sample_list_network_policies(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesPager: - Response message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAccessRule` External access firewall rules for filtering incoming traffic destined to + ExternalAddress resources. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any( + [parent, external_access_rule, external_access_rule_id] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3721,19 +3981,25 @@ def sample_list_network_policies(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListNetworkPoliciesRequest. + # in a vmwareengine.CreateExternalAccessRuleRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ListNetworkPoliciesRequest): - request = vmwareengine.ListNetworkPoliciesRequest(request) + if not isinstance(request, vmwareengine.CreateExternalAccessRuleRequest): + request = vmwareengine.CreateExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if external_access_rule is not None: + request.external_access_rule = external_access_rule + if external_access_rule_id is not None: + request.external_access_rule_id = external_access_rule_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_network_policies] + rpc = self._transport._wrapped_methods[ + self._transport.create_external_access_rule + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3749,33 +4015,33 @@ def sample_list_network_policies(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListNetworkPoliciesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.ExternalAccessRule, + metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def create_network_policy( + def update_external_access_rule( self, - request: Optional[Union[vmwareengine.CreateNetworkPolicyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.UpdateExternalAccessRuleRequest, dict] + ] = None, *, - parent: Optional[str] = None, - network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, - network_policy_id: Optional[str] = None, + external_access_rule: Optional[ + vmwareengine_resources.ExternalAccessRule + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates a new network policy in a given VMware Engine - network of a project and location (region). A new - network policy cannot be created if another network - policy already exists in the same scope. + r"""Updates the parameters of a single external access rule. Only + fields specified in ``update_mask`` are applied. .. 
code-block:: python @@ -3788,22 +4054,16 @@ def create_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_create_network_policy(): + def sample_update_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - network_policy = vmwareengine_v1.NetworkPolicy() - network_policy.edge_services_cidr = "edge_services_cidr_value" - - request = vmwareengine_v1.CreateNetworkPolicyRequest( - parent="parent_value", - network_policy_id="network_policy_id_value", - network_policy=network_policy, + request = vmwareengine_v1.UpdateExternalAccessRuleRequest( ) # Make the request - operation = client.create_network_policy(request=request) + operation = client.update_external_access_rule(request=request) print("Waiting for operation to complete...") @@ -3813,44 +4073,26 @@ def sample_create_network_policy(): print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.UpdateExternalAccessRuleRequest, dict]): The request object. Request message for - [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] - parent (str): - Required. The resource name of the location (region) to - create the new network policy in. Resource names are - schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): - Required. The network policy - configuration to use in the request. 
+ [VmwareEngine.UpdateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAccessRule] + external_access_rule (google.cloud.vmwareengine_v1.types.ExternalAccessRule): + Required. Description of the external + access rule. - This corresponds to the ``network_policy`` field + This corresponds to the ``external_access_rule`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - network_policy_id (str): - Required. The user-provided identifier of the network - policy to be created. This identifier must be unique - within parent - ``projects/{my-project}/locations/{us-central1}/networkPolicies`` - and becomes the final token in the name URI. The - identifier must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and - hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``ExternalAccessRule`` resource by + the update. The fields specified in the ``update_mask`` + are relative to the resource, not the full request. A + field will be overwritten if it is in the mask. If the + user does not provide a mask then all fields will be + overwritten. - This corresponds to the ``network_policy_id`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3863,20 +4105,14 @@ def sample_create_network_policy(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional - resources. 
You can use a network policy to enable or - disable internet access and external IP access. - Network policies are associated with a VMware Engine - network, which might span across regions. For a given - region, a network policy applies to all private - clouds in the VMware Engine network associated with - the policy. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ExternalAccessRule` External access firewall rules for filtering incoming traffic destined to + ExternalAddress resources. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, network_policy, network_policy_id]) + has_flattened_params = any([external_access_rule, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3884,28 +4120,30 @@ def sample_create_network_policy(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.CreateNetworkPolicyRequest. + # in a vmwareengine.UpdateExternalAccessRuleRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.CreateNetworkPolicyRequest): - request = vmwareengine.CreateNetworkPolicyRequest(request) + if not isinstance(request, vmwareengine.UpdateExternalAccessRuleRequest): + request = vmwareengine.UpdateExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent - if network_policy is not None: - request.network_policy = network_policy - if network_policy_id is not None: - request.network_policy_id = network_policy_id + if external_access_rule is not None: + request.external_access_rule = external_access_rule + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_network_policy] + rpc = self._transport._wrapped_methods[ + self._transport.update_external_access_rule + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("external_access_rule.name", request.external_access_rule.name),) + ), ) # Send the request. @@ -3920,36 +4158,25 @@ def sample_create_network_policy(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine_resources.NetworkPolicy, + vmwareengine_resources.ExternalAccessRule, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def update_network_policy( + def delete_external_access_rule( self, - request: Optional[Union[vmwareengine.UpdateNetworkPolicyRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.DeleteExternalAccessRuleRequest, dict] + ] = None, *, - network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Modifies a ``NetworkPolicy`` resource. 
Only the following fields - can be updated: ``internet_access``, ``external_ip``, - ``edge_services_cidr``. Only fields specified in ``updateMask`` - are applied. When updating a network policy, the external IP - network service can only be disabled if there are no external IP - addresses present in the scope of the policy. Also, a - ``NetworkService`` cannot be updated when - ``NetworkService.state`` is set to ``RECONCILING``. - - During operation processing, the resource is temporarily in the - ``ACTIVE`` state before the operation fully completes. For that - period of time, you can't update the resource. Use the operation - status to determine when the processing fully completes. + r"""Deletes a single external access rule. .. code-block:: python @@ -3962,20 +4189,17 @@ def update_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_update_network_policy(): + def sample_delete_external_access_rule(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - network_policy = vmwareengine_v1.NetworkPolicy() - network_policy.edge_services_cidr = "edge_services_cidr_value" - - request = vmwareengine_v1.UpdateNetworkPolicyRequest( - network_policy=network_policy, + request = vmwareengine_v1.DeleteExternalAccessRuleRequest( + name="name_value", ) # Make the request - operation = client.update_network_policy(request=request) + operation = client.delete_external_access_rule(request=request) print("Waiting for operation to complete...") @@ -3985,24 +4209,18 @@ def sample_update_network_policy(): print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.UpdateNetworkPolicyRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.DeleteExternalAccessRuleRequest, dict]): The request object. 
Request message for - [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] - network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): - Required. Network policy description. - This corresponds to the ``network_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the ``NetworkPolicy`` resource by the - update. The fields specified in the ``update_mask`` are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. + [VmwareEngine.DeleteExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule] + name (str): + Required. The resource name of the external access + firewall rule to delete. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` - This corresponds to the ``update_mask`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4015,20 +4233,22 @@ def sample_update_network_policy(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional - resources. You can use a network policy to enable or - disable internet access and external IP access. - Network policies are associated with a VMware Engine - network, which might span across regions. 
For a given - region, a network policy applies to all private - clouds in the VMware Engine network associated with - the policy. + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([network_policy, update_mask]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4036,28 +4256,26 @@ def sample_update_network_policy(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.UpdateNetworkPolicyRequest. + # in a vmwareengine.DeleteExternalAccessRuleRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.UpdateNetworkPolicyRequest): - request = vmwareengine.UpdateNetworkPolicyRequest(request) + if not isinstance(request, vmwareengine.DeleteExternalAccessRuleRequest): + request = vmwareengine.DeleteExternalAccessRuleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if network_policy is not None: - request.network_policy = network_policy - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_network_policy] + rpc = self._transport._wrapped_methods[ + self._transport.delete_external_access_rule + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("network_policy.name", request.network_policy.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -4072,25 +4290,24 @@ def sample_update_network_policy(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine_resources.NetworkPolicy, + empty_pb2.Empty, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def delete_network_policy( + def list_logging_servers( self, - request: Optional[Union[vmwareengine.DeleteNetworkPolicyRequest, dict]] = None, + request: Optional[Union[vmwareengine.ListLoggingServersRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a ``NetworkPolicy`` resource. A network policy cannot be - deleted when ``NetworkService.state`` is set to ``RECONCILING`` - for either its external IP or internet access service. + ) -> pagers.ListLoggingServersPager: + r"""Lists logging servers configured for a given private + cloud. .. 
code-block:: python @@ -4103,38 +4320,35 @@ def delete_network_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_delete_network_policy(): + def sample_list_logging_servers(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.DeleteNetworkPolicyRequest( - name="name_value", + request = vmwareengine_v1.ListLoggingServersRequest( + parent="parent_value", ) # Make the request - operation = client.delete_network_policy(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + page_result = client.list_logging_servers(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListLoggingServersRequest, dict]): The request object. Request message for - [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] - name (str): - Required. The resource name of the network policy to - delete. Resource names are schemeless URIs that follow - the conventions in + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] + parent (str): + Required. The resource name of the private cloud to be + queried for logging servers. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4144,25 +4358,18 @@ def sample_delete_network_policy(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListLoggingServersPager: + Response message for + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] - } + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4170,24 +4377,24 @@ def sample_delete_network_policy(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.DeleteNetworkPolicyRequest. + # in a vmwareengine.ListLoggingServersRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.DeleteNetworkPolicyRequest): - request = vmwareengine.DeleteNetworkPolicyRequest(request) + if not isinstance(request, vmwareengine.ListLoggingServersRequest): + request = vmwareengine.ListLoggingServersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_network_policy] + rpc = self._transport._wrapped_methods[self._transport.list_logging_servers] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -4198,34 +4405,28 @@ def sample_delete_network_policy(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=vmwareengine.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLoggingServersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - def create_vmware_engine_network( + def get_logging_server( self, - request: Optional[ - Union[vmwareengine.CreateVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetLoggingServerRequest, dict]] = None, *, - parent: Optional[str] = None, - vmware_engine_network: Optional[ - vmwareengine_resources.VmwareEngineNetwork - ] = None, - vmware_engine_network_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new VMware Engine network that can be used - by a private cloud. + ) -> vmwareengine_resources.LoggingServer: + r"""Gets details of a logging server. .. 
code-block:: python @@ -4238,65 +4439,171 @@ def create_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_create_vmware_engine_network(): + def sample_get_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" - - request = vmwareengine_v1.CreateVmwareEngineNetworkRequest( - parent="parent_value", - vmware_engine_network_id="vmware_engine_network_id_value", - vmware_engine_network=vmware_engine_network, + request = vmwareengine_v1.GetLoggingServerRequest( + name="name_value", ) # Make the request - operation = client.create_vmware_engine_network(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + response = client.get_logging_server(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetLoggingServerRequest, dict]): The request object. Request message for - [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] - parent (str): - Required. The resource name of the location to create - the new VMware Engine network in. A VMware Engine - network of type ``LEGACY`` is a regional resource, and a - VMware Engine network of type ``STANDARD`` is a global - resource. Resource names are schemeless URIs that follow + [VmwareEngine.GetLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.GetLoggingServer] + name (str): + Required. The resource name of the Logging Server to + retrieve. Resource names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For - example: ``projects/my-project/locations/global`` + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.LoggingServer: + Logging server to receive vCenter or + ESXi logs. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetLoggingServerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetLoggingServerRequest): + request = vmwareengine.GetLoggingServerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_logging_server] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_logging_server( + self, + request: Optional[Union[vmwareengine.CreateLoggingServerRequest, dict]] = None, + *, + parent: Optional[str] = None, + logging_server: Optional[vmwareengine_resources.LoggingServer] = None, + logging_server_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new logging server for a given private + cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.CreateLoggingServerRequest( + parent="parent_value", + logging_server=logging_server, + logging_server_id="logging_server_id_value", + ) + + # Make the request + operation = client.create_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.CreateLoggingServerRequest, dict]): + The request object. 
Request message for + [VmwareEngine.CreateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.CreateLoggingServer] + parent (str): + Required. The resource name of the private cloud to + create a new Logging Server in. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - vmware_engine_network (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork): + logging_server (google.cloud.vmwareengine_v1.types.LoggingServer): Required. The initial description of - the new VMware Engine network. + a new logging server. - This corresponds to the ``vmware_engine_network`` field + This corresponds to the ``logging_server`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - vmware_engine_network_id (str): - Required. The user-provided identifier of the new VMware - Engine network. This identifier must be unique among - VMware Engine network resources within the parent and - becomes the final token in the name URI. The identifier - must meet the following requirements: + logging_server_id (str): + Required. The user-provided identifier of the + ``LoggingServer`` to be created. This identifier must be + unique among ``LoggingServer`` resources within the + parent and becomes the final token in the name URI. The + identifier must meet the following requirements: - - For networks of type LEGACY, adheres to the format: - ``{region-id}-default``. Replace ``{region-id}`` with - the region where you want to create the VMware Engine - network. For example, "us-central1-default". 
- Only contains 1-63 alphanumeric characters and hyphens - Begins with an alphabetical character @@ -4306,7 +4613,7 @@ def sample_create_vmware_engine_network(): 1034 `__ (section 3.5) - This corresponds to the ``vmware_engine_network_id`` field + This corresponds to the ``logging_server_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4319,16 +4626,15 @@ def sample_create_vmware_engine_network(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine - private clouds. + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.LoggingServer` + Logging server to receive vCenter or ESXi logs. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [parent, vmware_engine_network, vmware_engine_network_id] - ) + has_flattened_params = any([parent, logging_server, logging_server_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4336,25 +4642,23 @@ def sample_create_vmware_engine_network(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.CreateVmwareEngineNetworkRequest. + # in a vmwareengine.CreateLoggingServerRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, vmwareengine.CreateVmwareEngineNetworkRequest): - request = vmwareengine.CreateVmwareEngineNetworkRequest(request) + if not isinstance(request, vmwareengine.CreateLoggingServerRequest): + request = vmwareengine.CreateLoggingServerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if vmware_engine_network is not None: - request.vmware_engine_network = vmware_engine_network - if vmware_engine_network_id is not None: - request.vmware_engine_network_id = vmware_engine_network_id + if logging_server is not None: + request.logging_server = logging_server + if logging_server_id is not None: + request.logging_server_id = logging_server_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_vmware_engine_network - ] + rpc = self._transport._wrapped_methods[self._transport.create_logging_server] # Certain fields should be provided within the metadata header; # add these here. @@ -4374,30 +4678,25 @@ def sample_create_vmware_engine_network(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine_resources.VmwareEngineNetwork, + vmwareengine_resources.LoggingServer, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. 
return response - def update_vmware_engine_network( + def update_logging_server( self, - request: Optional[ - Union[vmwareengine.UpdateVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.UpdateLoggingServerRequest, dict]] = None, *, - vmware_engine_network: Optional[ - vmwareengine_resources.VmwareEngineNetwork - ] = None, + logging_server: Optional[vmwareengine_resources.LoggingServer] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Modifies a VMware Engine network resource. Only the following - fields can be updated: ``description``. Only fields specified in - ``updateMask`` are applied. + r"""Updates the parameters of a single logging server. Only fields + specified in ``update_mask`` are applied. .. code-block:: python @@ -4410,20 +4709,23 @@ def update_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_update_vmware_engine_network(): + def sample_update_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" - - request = vmwareengine_v1.UpdateVmwareEngineNetworkRequest( - vmware_engine_network=vmware_engine_network, + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.UpdateLoggingServerRequest( + logging_server=logging_server, ) # Make the request - operation = client.update_vmware_engine_network(request=request) + operation = client.update_logging_server(request=request) print("Waiting for operation to 
complete...") @@ -4433,25 +4735,22 @@ def sample_update_vmware_engine_network(): print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.UpdateVmwareEngineNetworkRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.UpdateLoggingServerRequest, dict]): The request object. Request message for - [VmwareEngine.UpdateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.UpdateVmwareEngineNetwork] - vmware_engine_network (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork): - Required. VMware Engine network - description. - - This corresponds to the ``vmware_engine_network`` field + [VmwareEngine.UpdateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.UpdateLoggingServer] + logging_server (google.cloud.vmwareengine_v1.types.LoggingServer): + Required. Logging server description. + This corresponds to the ``logging_server`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the VMware Engine network resource by the + overwritten in the ``LoggingServer`` resource by the update. The fields specified in the ``update_mask`` are relative to the resource, not the full request. A field will be overwritten if it is in the mask. If the user does not provide a mask then all fields will be - overwritten. Only the following fields can be updated: - ``description``. + overwritten. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -4466,14 +4765,15 @@ def sample_update_vmware_engine_network(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine - private clouds. 
+ The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.LoggingServer` + Logging server to receive vCenter or ESXi logs. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([vmware_engine_network, update_mask]) + has_flattened_params = any([logging_server, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4481,29 +4781,27 @@ def sample_update_vmware_engine_network(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.UpdateVmwareEngineNetworkRequest. + # in a vmwareengine.UpdateLoggingServerRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.UpdateVmwareEngineNetworkRequest): - request = vmwareengine.UpdateVmwareEngineNetworkRequest(request) + if not isinstance(request, vmwareengine.UpdateLoggingServerRequest): + request = vmwareengine.UpdateLoggingServerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if vmware_engine_network is not None: - request.vmware_engine_network = vmware_engine_network + if logging_server is not None: + request.logging_server = logging_server if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.update_vmware_engine_network - ] + rpc = self._transport._wrapped_methods[self._transport.update_logging_server] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("vmware_engine_network.name", request.vmware_engine_network.name),) + (("logging_server.name", request.logging_server.name),) ), ) @@ -4519,28 +4817,23 @@ def sample_update_vmware_engine_network(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine_resources.VmwareEngineNetwork, + vmwareengine_resources.LoggingServer, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def delete_vmware_engine_network( + def delete_logging_server( self, - request: Optional[ - Union[vmwareengine.DeleteVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.DeleteLoggingServerRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a ``VmwareEngineNetwork`` resource. You can only delete - a VMware Engine network after all resources that refer to it are - deleted. For example, a private cloud, a network peering, and a - network policy can all refer to the same VMware Engine network. + r"""Deletes a single logging server. .. 
code-block:: python @@ -4553,17 +4846,17 @@ def delete_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_delete_vmware_engine_network(): + def sample_delete_logging_server(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.DeleteVmwareEngineNetworkRequest( + request = vmwareengine_v1.DeleteLoggingServerRequest( name="name_value", ) # Make the request - operation = client.delete_vmware_engine_network(request=request) + operation = client.delete_logging_server(request=request) print("Waiting for operation to complete...") @@ -4572,19 +4865,4516 @@ def sample_delete_vmware_engine_network(): # Handle the response print(response) - Args: - request (Union[google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest, dict]): - The request object. Request message for - [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] - name (str): - Required. The resource name of the VMware Engine network - to be deleted. Resource names are schemeless URIs that - follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + Args: + request (Union[google.cloud.vmwareengine_v1.types.DeleteLoggingServerRequest, dict]): + The request object. Request message for + [VmwareEngine.DeleteLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.DeleteLoggingServer] + name (str): + Required. The resource name of the logging server to + delete. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.DeleteLoggingServerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.DeleteLoggingServerRequest): + request = vmwareengine.DeleteLoggingServerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_logging_server] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_node_types( + self, + request: Optional[Union[vmwareengine.ListNodeTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNodeTypesPager: + r"""Lists node types + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_node_types(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNodeTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_node_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListNodeTypesRequest, dict]): + The request object. 
Request message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + parent (str): + Required. The resource name of the location to be + queried for node types. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1-a`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesPager: + Response message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListNodeTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListNodeTypesRequest): + request = vmwareengine.ListNodeTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_node_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNodeTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_node_type( + self, + request: Optional[Union[vmwareengine.GetNodeTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NodeType: + r"""Gets details of a single ``NodeType``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_node_type(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNodeTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_node_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetNodeTypeRequest, dict]): + The request object. Request message for + [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + name (str): + Required. The resource name of the node type to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.NodeType: + Describes node type. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetNodeTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetNodeTypeRequest): + request = vmwareengine.GetNodeTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_node_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def show_nsx_credentials( + self, + request: Optional[Union[vmwareengine.ShowNsxCredentialsRequest, dict]] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.Credentials: + r"""Gets details of credentials for NSX appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_show_nsx_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ShowNsxCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + response = client.show_nsx_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest, dict]): + The request object. Request message for + [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] + private_cloud (str): + Required. The resource name of the private cloud to be + queried for credentials. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.Credentials: + Credentials for a private cloud. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ShowNsxCredentialsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ShowNsxCredentialsRequest): + request = vmwareengine.ShowNsxCredentialsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.show_nsx_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def show_vcenter_credentials( + self, + request: Optional[ + Union[vmwareengine.ShowVcenterCredentialsRequest, dict] + ] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.Credentials: + r"""Gets details of credentials for Vcenter appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_show_vcenter_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ShowVcenterCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + response = client.show_vcenter_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest, dict]): + The request object. Request message for + [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] + private_cloud (str): + Required. The resource name of the private cloud to be + queried for credentials. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.Credentials: + Credentials for a private cloud. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ShowVcenterCredentialsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ShowVcenterCredentialsRequest): + request = vmwareengine.ShowVcenterCredentialsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.show_vcenter_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reset_nsx_credentials( + self, + request: Optional[Union[vmwareengine.ResetNsxCredentialsRequest, dict]] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Resets credentials of the NSX appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_reset_nsx_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ResetNsxCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + operation = client.reset_nsx_credentials(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest, dict]): + The request object. Request message for + [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] + private_cloud (str): + Required. The resource name of the private cloud to + reset credentials for. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ResetNsxCredentialsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ResetNsxCredentialsRequest): + request = vmwareengine.ResetNsxCredentialsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reset_nsx_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.PrivateCloud, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def reset_vcenter_credentials( + self, + request: Optional[ + Union[vmwareengine.ResetVcenterCredentialsRequest, dict] + ] = None, + *, + private_cloud: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Resets credentials of the Vcenter appliance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_reset_vcenter_credentials(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ResetVcenterCredentialsRequest( + private_cloud="private_cloud_value", + ) + + # Make the request + operation = client.reset_vcenter_credentials(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest, dict]): + The request object. Request message for + [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] + private_cloud (str): + Required. The resource name of the private cloud to + reset credentials for. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``private_cloud`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateCloud` Represents a private cloud resource. Private clouds of type STANDARD and + TIME_LIMITED are zonal resources, STRETCHED private + clouds are regional. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([private_cloud]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ResetVcenterCredentialsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ResetVcenterCredentialsRequest): + request = vmwareengine.ResetVcenterCredentialsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if private_cloud is not None: + request.private_cloud = private_cloud + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.reset_vcenter_credentials + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("private_cloud", request.private_cloud),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.PrivateCloud, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_dns_forwarding( + self, + request: Optional[Union[vmwareengine.GetDnsForwardingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.DnsForwarding: + r"""Gets details of the ``DnsForwarding`` config. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetDnsForwardingRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_forwarding(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetDnsForwardingRequest, dict]): + The request object. Request message for + [VmwareEngine.GetDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsForwarding] + name (str): + Required. The resource name of a ``DnsForwarding`` to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/dnsForwarding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.DnsForwarding: + DNS forwarding config. + This config defines a list of domain to + name server mappings, and is attached to + the private cloud for custom domain + resolution. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetDnsForwardingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetDnsForwardingRequest): + request = vmwareengine.GetDnsForwardingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dns_forwarding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_dns_forwarding( + self, + request: Optional[Union[vmwareengine.UpdateDnsForwardingRequest, dict]] = None, + *, + dns_forwarding: Optional[vmwareengine_resources.DnsForwarding] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of the ``DnsForwarding`` config, like + associated domains. Only fields specified in ``update_mask`` are + applied. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_update_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + dns_forwarding = vmwareengine_v1.DnsForwarding() + dns_forwarding.forwarding_rules.domain = "domain_value" + dns_forwarding.forwarding_rules.name_servers = ['name_servers_value1', 'name_servers_value2'] + + request = vmwareengine_v1.UpdateDnsForwardingRequest( + dns_forwarding=dns_forwarding, + ) + + # Make the request + operation = client.update_dns_forwarding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.UpdateDnsForwardingRequest, dict]): + The request object. Request message for + [VmwareEngine.UpdateDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateDnsForwarding] + dns_forwarding (google.cloud.vmwareengine_v1.types.DnsForwarding): + Required. DnsForwarding config + details. + + This corresponds to the ``dns_forwarding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``DnsForwarding`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.DnsForwarding` DNS forwarding config. + This config defines a list of domain to name server + mappings, and is attached to the private cloud for + custom domain resolution. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dns_forwarding, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.UpdateDnsForwardingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.UpdateDnsForwardingRequest): + request = vmwareengine.UpdateDnsForwardingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dns_forwarding is not None: + request.dns_forwarding = dns_forwarding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_dns_forwarding] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dns_forwarding.name", request.dns_forwarding.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.DnsForwarding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_network_peering( + self, + request: Optional[Union[vmwareengine.GetNetworkPeeringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NetworkPeering: + r"""Retrieves a ``NetworkPeering`` resource by its resource name. + The resource contains details of the network peering, such as + peered networks, import and export custom route configurations, + and peering state. NetworkPeering is a global resource and + location can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + response = client.get_network_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetNetworkPeeringRequest, dict]): + The request object. Request message for + [VmwareEngine.GetNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPeering] + name (str): + Required. The resource name of the network peering to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.NetworkPeering: + Details of a network peering. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetNetworkPeeringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetNetworkPeeringRequest): + request = vmwareengine.GetNetworkPeeringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_network_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_network_peerings( + self, + request: Optional[Union[vmwareengine.ListNetworkPeeringsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkPeeringsPager: + r"""Lists ``NetworkPeering`` resources in a given project. + NetworkPeering is a global resource and location can only be + global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_network_peerings(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNetworkPeeringsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_network_peerings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListNetworkPeeringsRequest, dict]): + The request object. Request message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + parent (str): + Required. The resource name of the location (global) to + query for network peerings. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPeeringsPager: + Response message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListNetworkPeeringsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListNetworkPeeringsRequest): + request = vmwareengine.ListNetworkPeeringsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_network_peerings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNetworkPeeringsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_network_peering( + self, + request: Optional[Union[vmwareengine.CreateNetworkPeeringRequest, dict]] = None, + *, + parent: Optional[str] = None, + network_peering: Optional[vmwareengine_resources.NetworkPeering] = None, + network_peering_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new network peering between the peer network and + VMware Engine network provided in a ``NetworkPeering`` resource. + NetworkPeering is a global resource and location can only be + global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.CreateNetworkPeeringRequest( + parent="parent_value", + network_peering_id="network_peering_id_value", + network_peering=network_peering, + ) + + # Make the request + operation = client.create_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request 
(Union[google.cloud.vmwareengine_v1.types.CreateNetworkPeeringRequest, dict]): + The request object. Request message for + [VmwareEngine.CreateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPeering] + parent (str): + Required. The resource name of the location to create + the new network peering in. This value is always + ``global``, because ``NetworkPeering`` is a global + resource. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_peering (google.cloud.vmwareengine_v1.types.NetworkPeering): + Required. The initial description of + the new network peering. + + This corresponds to the ``network_peering`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_peering_id (str): + Required. The user-provided identifier of the new + ``NetworkPeering``. This identifier must be unique among + ``NetworkPeering`` resources within the parent and + becomes the final token in the name URI. The identifier + must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``network_peering_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.NetworkPeering` + Details of a network peering. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, network_peering, network_peering_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.CreateNetworkPeeringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.CreateNetworkPeeringRequest): + request = vmwareengine.CreateNetworkPeeringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if network_peering is not None: + request.network_peering = network_peering + if network_peering_id is not None: + request.network_peering_id = network_peering_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_network_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.NetworkPeering, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_network_peering( + self, + request: Optional[Union[vmwareengine.DeleteNetworkPeeringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``NetworkPeering`` resource. When a network peering is + deleted for a VMware Engine network, the peer network becomes + inaccessible to that VMware Engine network. NetworkPeering is a + global resource and location can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_delete_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.DeleteNetworkPeeringRequest, dict]): + The request object. Request message for + [VmwareEngine.DeleteNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPeering] + name (str): + Required. 
The resource name of the network peering to be + deleted. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.DeleteNetworkPeeringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.DeleteNetworkPeeringRequest): + request = vmwareengine.DeleteNetworkPeeringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_network_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_network_peering( + self, + request: Optional[Union[vmwareengine.UpdateNetworkPeeringRequest, dict]] = None, + *, + network_peering: Optional[vmwareengine_resources.NetworkPeering] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies a ``NetworkPeering`` resource. Only the ``description`` + field can be updated. Only fields specified in ``updateMask`` + are applied. NetworkPeering is a global resource and location + can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_update_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.UpdateNetworkPeeringRequest( + network_peering=network_peering, + ) + + # Make the request + operation = client.update_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.UpdateNetworkPeeringRequest, dict]): + The request object. Request message for + [VmwareEngine.UpdateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPeering] + network_peering (google.cloud.vmwareengine_v1.types.NetworkPeering): + Required. Network peering + description. + + This corresponds to the ``network_peering`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``NetworkPeering`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.vmwareengine_v1.types.NetworkPeering` + Details of a network peering. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([network_peering, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.UpdateNetworkPeeringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.UpdateNetworkPeeringRequest): + request = vmwareengine.UpdateNetworkPeeringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if network_peering is not None: + request.network_peering = network_peering + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_network_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("network_peering.name", request.network_peering.name),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.NetworkPeering, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_peering_routes( + self, + request: Optional[Union[vmwareengine.ListPeeringRoutesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPeeringRoutesPager: + r"""Lists the network peering routes exchanged over a + peering connection. NetworkPeering is a global resource + and location can only be global. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_peering_routes(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListPeeringRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_peering_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListPeeringRoutesRequest, dict]): + The request object. Request message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + parent (str): + Required. 
The resource name of the network peering to + retrieve peering routes from. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPeeringRoutesPager: + Response message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListPeeringRoutesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListPeeringRoutesRequest): + request = vmwareengine.ListPeeringRoutesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_peering_routes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPeeringRoutesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_hcx_activation_key( + self, + request: Optional[ + Union[vmwareengine.CreateHcxActivationKeyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + hcx_activation_key: Optional[vmwareengine_resources.HcxActivationKey] = None, + hcx_activation_key_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new HCX activation key in a given private + cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_hcx_activation_key(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.CreateHcxActivationKeyRequest( + parent="parent_value", + hcx_activation_key_id="hcx_activation_key_id_value", + ) + + # Make the request + operation = client.create_hcx_activation_key(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest, dict]): + The request object. Request message for + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + parent (str): + Required. The resource name of the private cloud to + create the key for. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hcx_activation_key (google.cloud.vmwareengine_v1.types.HcxActivationKey): + Required. The initial description of + a new HCX activation key. When creating + a new key, this field must be an empty + object. + + This corresponds to the ``hcx_activation_key`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + hcx_activation_key_id (str): + Required. The user-provided identifier of the + ``HcxActivationKey`` to be created. 
This identifier must + be unique among ``HcxActivationKey`` resources within + the parent and becomes the final token in the name URI. + The identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``hcx_activation_key_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.HcxActivationKey` HCX activation key. A default key is created during + private cloud provisioning, but this behavior is + subject to change and you should always verify active + keys. Use + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + to retrieve existing keys and + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + to create new ones. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, hcx_activation_key, hcx_activation_key_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.CreateHcxActivationKeyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.CreateHcxActivationKeyRequest): + request = vmwareengine.CreateHcxActivationKeyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if hcx_activation_key is not None: + request.hcx_activation_key = hcx_activation_key + if hcx_activation_key_id is not None: + request.hcx_activation_key_id = hcx_activation_key_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_hcx_activation_key + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.HcxActivationKey, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_hcx_activation_keys( + self, + request: Optional[ + Union[vmwareengine.ListHcxActivationKeysRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListHcxActivationKeysPager: + r"""Lists ``HcxActivationKey`` resources in a given private cloud. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_hcx_activation_keys(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListHcxActivationKeysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_hcx_activation_keys(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest, dict]): + The request object. Request message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + parent (str): + Required. The resource name of the private cloud to be + queried for HCX activation keys. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysPager: + Response message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListHcxActivationKeysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListHcxActivationKeysRequest): + request = vmwareengine.ListHcxActivationKeysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_hcx_activation_keys] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListHcxActivationKeysPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_hcx_activation_key( + self, + request: Optional[Union[vmwareengine.GetHcxActivationKeyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.HcxActivationKey: + r"""Retrieves a ``HcxActivationKey`` resource by its resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_hcx_activation_key(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetHcxActivationKeyRequest( + name="name_value", + ) + + # Make the request + response = client.get_hcx_activation_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest, dict]): + The request object. Request message for + [VmwareEngine.GetHcxActivationKeys][] + name (str): + Required. The resource name of the HCX activation key to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.HcxActivationKey: + HCX activation key. A default key is created during + private cloud provisioning, but this behavior is + subject to change and you should always verify active + keys. Use + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + to retrieve existing keys and + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + to create new ones. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetHcxActivationKeyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetHcxActivationKeyRequest): + request = vmwareengine.GetHcxActivationKeyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_hcx_activation_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_network_policy( + self, + request: Optional[Union[vmwareengine.GetNetworkPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NetworkPolicy: + r"""Retrieves a ``NetworkPolicy`` resource by its resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNetworkPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_network_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest, dict]): + The request object. Request message for + [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + name (str): + Required. The resource name of the network policy to + retrieve. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.NetworkPolicy: + Represents a network policy resource. + Network policies are regional resources. + You can use a network policy to enable + or disable internet access and external + IP access. Network policies are + associated with a VMware Engine network, + which might span across regions. For a + given region, a network policy applies + to all private clouds in the VMware + Engine network associated with the + policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetNetworkPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetNetworkPolicyRequest): + request = vmwareengine.GetNetworkPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_network_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_network_policies( + self, + request: Optional[Union[vmwareengine.ListNetworkPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkPoliciesPager: + r"""Lists ``NetworkPolicy`` resources in a specified project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_network_policies(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNetworkPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_network_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest, dict]): + The request object. Request message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + parent (str): + Required. 
The resource name of the location (region) to + query for network policies. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesPager: + Response message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListNetworkPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListNetworkPoliciesRequest): + request = vmwareengine.ListNetworkPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_network_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNetworkPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_network_policy( + self, + request: Optional[Union[vmwareengine.CreateNetworkPolicyRequest, dict]] = None, + *, + parent: Optional[str] = None, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, + network_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new network policy in a given VMware Engine + network of a project and location (region). A new + network policy cannot be created if another network + policy already exists in the same scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + network_policy = vmwareengine_v1.NetworkPolicy() + network_policy.edge_services_cidr = "edge_services_cidr_value" + + request = vmwareengine_v1.CreateNetworkPolicyRequest( + parent="parent_value", + network_policy_id="network_policy_id_value", + network_policy=network_policy, + ) + + # Make the request + operation = client.create_network_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest, dict]): + The request object. Request message for + [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] + parent (str): + Required. The resource name of the location (region) to + create the new network policy in. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): + Required. The network policy + configuration to use in the request. + + This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy_id (str): + Required. The user-provided identifier of the network + policy to be created. 
This identifier must be unique + within parent + ``projects/{my-project}/locations/{us-central1}/networkPolicies`` + and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``network_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional + resources. You can use a network policy to enable or + disable internet access and external IP access. + Network policies are associated with a VMware Engine + network, which might span across regions. For a given + region, a network policy applies to all private + clouds in the VMware Engine network associated with + the policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, network_policy, network_policy_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.CreateNetworkPolicyRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.CreateNetworkPolicyRequest): + request = vmwareengine.CreateNetworkPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if network_policy is not None: + request.network_policy = network_policy + if network_policy_id is not None: + request.network_policy_id = network_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_network_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.NetworkPolicy, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_network_policy( + self, + request: Optional[Union[vmwareengine.UpdateNetworkPolicyRequest, dict]] = None, + *, + network_policy: Optional[vmwareengine_resources.NetworkPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies a ``NetworkPolicy`` resource. Only the following fields + can be updated: ``internet_access``, ``external_ip``, + ``edge_services_cidr``. Only fields specified in ``updateMask`` + are applied. 
When updating a network policy, the external IP + network service can only be disabled if there are no external IP + addresses present in the scope of the policy. Also, a + ``NetworkService`` cannot be updated when + ``NetworkService.state`` is set to ``RECONCILING``. + + During operation processing, the resource is temporarily in the + ``ACTIVE`` state before the operation fully completes. For that + period of time, you can't update the resource. Use the operation + status to determine when the processing fully completes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_update_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + network_policy = vmwareengine_v1.NetworkPolicy() + network_policy.edge_services_cidr = "edge_services_cidr_value" + + request = vmwareengine_v1.UpdateNetworkPolicyRequest( + network_policy=network_policy, + ) + + # Make the request + operation = client.update_network_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.UpdateNetworkPolicyRequest, dict]): + The request object. Request message for + [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] + network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): + Required. Network policy description. 
+ This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``NetworkPolicy`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.NetworkPolicy` Represents a network policy resource. Network policies are regional + resources. You can use a network policy to enable or + disable internet access and external IP access. + Network policies are associated with a VMware Engine + network, which might span across regions. For a given + region, a network policy applies to all private + clouds in the VMware Engine network associated with + the policy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([network_policy, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.UpdateNetworkPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.UpdateNetworkPolicyRequest): + request = vmwareengine.UpdateNetworkPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if network_policy is not None: + request.network_policy = network_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_network_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("network_policy.name", request.network_policy.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.NetworkPolicy, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_network_policy( + self, + request: Optional[Union[vmwareengine.DeleteNetworkPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``NetworkPolicy`` resource. A network policy cannot be + deleted when ``NetworkService.state`` is set to ``RECONCILING`` + for either its external IP or internet access service. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_delete_network_policy(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteNetworkPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_network_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest, dict]): + The request object. Request message for + [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] + name (str): + Required. The resource name of the network policy to + delete. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.DeleteNetworkPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.DeleteNetworkPolicyRequest): + request = vmwareengine.DeleteNetworkPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_network_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_management_dns_zone_bindings( + self, + request: Optional[ + Union[vmwareengine.ListManagementDnsZoneBindingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListManagementDnsZoneBindingsPager: + r"""Lists Consumer VPCs bound to Management DNS Zone of a + given private cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_management_dns_zone_bindings(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListManagementDnsZoneBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_management_dns_zone_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsRequest, dict]): + The request object. Request message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] + parent (str): + Required. The resource name of the private cloud to be + queried for management DNS zone bindings. Resource names + are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListManagementDnsZoneBindingsPager: + Response message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListManagementDnsZoneBindingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListManagementDnsZoneBindingsRequest): + request = vmwareengine.ListManagementDnsZoneBindingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.list_management_dns_zone_bindings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListManagementDnsZoneBindingsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.GetManagementDnsZoneBindingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.ManagementDnsZoneBinding: + r"""Retrieves a 'ManagementDnsZoneBinding' resource by + its resource name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + response = client.get_management_dns_zone_binding(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetManagementDnsZoneBindingRequest, dict]): + The request object. Request message for + [VmwareEngine.GetManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.GetManagementDnsZoneBinding] + name (str): + Required. The resource name of the management DNS zone + binding to retrieve. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding: + Represents a binding between a + network and the management DNS zone. A + management DNS zone is the Cloud DNS + cross-project binding zone that VMware + Engine creates for each private cloud. 
+ It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi + hosts and management VM appliances like + vCenter and NSX Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetManagementDnsZoneBindingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetManagementDnsZoneBindingRequest): + request = vmwareengine.GetManagementDnsZoneBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_management_dns_zone_binding + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.CreateManagementDnsZoneBindingRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + management_dns_zone_binding: Optional[ + vmwareengine_resources.ManagementDnsZoneBinding + ] = None, + management_dns_zone_binding_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new ``ManagementDnsZoneBinding`` resource in a private + cloud. This RPC creates the DNS binding and the resource that + represents the DNS binding of the consumer VPC network to the + management DNS zone. A management DNS zone is the Cloud DNS + cross-project binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP addresses + for the private cloud's ESXi hosts and management VM appliances + like vCenter and NSX Manager. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.CreateManagementDnsZoneBindingRequest( + parent="parent_value", + management_dns_zone_binding=management_dns_zone_binding, + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + # Make the request + operation = client.create_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.CreateManagementDnsZoneBindingRequest, dict]): + The request object. Request message for + [VmwareEngine.CreateManagementDnsZoneBindings][] + parent (str): + Required. The resource name of the private cloud to + create a new management DNS zone binding for. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management_dns_zone_binding (google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding): + Required. The initial values for a + new management DNS zone binding. + + This corresponds to the ``management_dns_zone_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ management_dns_zone_binding_id (str): + Required. The user-provided identifier of the + ``ManagementDnsZoneBinding`` resource to be created. + This identifier must be unique among + ``ManagementDnsZoneBinding`` resources within the parent + and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``management_dns_zone_binding_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding` Represents a binding between a network and the management DNS zone. + A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi hosts and + management VM appliances like vCenter and NSX + Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, management_dns_zone_binding, management_dns_zone_binding_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.CreateManagementDnsZoneBindingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.CreateManagementDnsZoneBindingRequest): + request = vmwareengine.CreateManagementDnsZoneBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if management_dns_zone_binding is not None: + request.management_dns_zone_binding = management_dns_zone_binding + if management_dns_zone_binding_id is not None: + request.management_dns_zone_binding_id = management_dns_zone_binding_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_management_dns_zone_binding + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.ManagementDnsZoneBinding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.UpdateManagementDnsZoneBindingRequest, dict] + ] = None, + *, + management_dns_zone_binding: Optional[ + vmwareengine_resources.ManagementDnsZoneBinding + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``ManagementDnsZoneBinding`` resource. Only fields + specified in ``update_mask`` are applied. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_update_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.UpdateManagementDnsZoneBindingRequest( + management_dns_zone_binding=management_dns_zone_binding, + ) + + # Make the request + operation = client.update_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.UpdateManagementDnsZoneBindingRequest, dict]): + The request object. 
Request message for + [VmwareEngine.UpdateManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateManagementDnsZoneBinding] + management_dns_zone_binding (google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding): + Required. New values to update the + management DNS zone binding with. + + This corresponds to the ``management_dns_zone_binding`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``ManagementDnsZoneBinding`` resource + by the update. The fields specified in the + ``update_mask`` are relative to the resource, not the + full request. A field will be overwritten if it is in + the mask. If the user does not provide a mask then all + fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding` Represents a binding between a network and the management DNS zone. + A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi hosts and + management VM appliances like vCenter and NSX + Manager. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([management_dns_zone_binding, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.UpdateManagementDnsZoneBindingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.UpdateManagementDnsZoneBindingRequest): + request = vmwareengine.UpdateManagementDnsZoneBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if management_dns_zone_binding is not None: + request.management_dns_zone_binding = management_dns_zone_binding + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_management_dns_zone_binding + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "management_dns_zone_binding.name", + request.management_dns_zone_binding.name, + ), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.ManagementDnsZoneBinding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.DeleteManagementDnsZoneBindingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``ManagementDnsZoneBinding`` resource. When a + management DNS zone binding is deleted, the corresponding + consumer VPC network is no longer bound to the management DNS + zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_delete_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.DeleteManagementDnsZoneBindingRequest, dict]): + The request object. Request message for + [VmwareEngine.DeleteManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding] + name (str): + Required. The resource name of the management DNS zone + binding to delete. 
Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.DeleteManagementDnsZoneBindingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, vmwareengine.DeleteManagementDnsZoneBindingRequest): + request = vmwareengine.DeleteManagementDnsZoneBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_management_dns_zone_binding + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def repair_management_dns_zone_binding( + self, + request: Optional[ + Union[vmwareengine.RepairManagementDnsZoneBindingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Retries to create a ``ManagementDnsZoneBinding`` resource that + is in failed state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_repair_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.RepairManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.repair_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.RepairManagementDnsZoneBindingRequest, dict]): + The request object. Request message for + [VmwareEngine.RepairManagementDnsZoneBindings][] + name (str): + Required. The resource name of the management DNS zone + binding to repair. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding` Represents a binding between a network and the management DNS zone. 
+ A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi hosts and + management VM appliances like vCenter and NSX + Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.RepairManagementDnsZoneBindingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.RepairManagementDnsZoneBindingRequest): + request = vmwareengine.RepairManagementDnsZoneBindingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.repair_management_dns_zone_binding + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.ManagementDnsZoneBinding, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def create_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.CreateVmwareEngineNetworkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, + vmware_engine_network_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new VMware Engine network that can be used + by a private cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() + vmware_engine_network.type_ = "STANDARD" + + request = vmwareengine_v1.CreateVmwareEngineNetworkRequest( + parent="parent_value", + vmware_engine_network_id="vmware_engine_network_id_value", + vmware_engine_network=vmware_engine_network, + ) + + # Make the request + operation = client.create_vmware_engine_network(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest, dict]): + The request object. 
Request message for + [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] + parent (str): + Required. The resource name of the location to create + the new VMware Engine network in. A VMware Engine + network of type ``LEGACY`` is a regional resource, and a + VMware Engine network of type ``STANDARD`` is a global + resource. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vmware_engine_network (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork): + Required. The initial description of + the new VMware Engine network. + + This corresponds to the ``vmware_engine_network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vmware_engine_network_id (str): + Required. The user-provided identifier of the new VMware + Engine network. This identifier must be unique among + VMware Engine network resources within the parent and + becomes the final token in the name URI. The identifier + must meet the following requirements: + + - For networks of type LEGACY, adheres to the format: + ``{region-id}-default``. Replace ``{region-id}`` with + the region where you want to create the VMware Engine + network. For example, "us-central1-default". + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + + This corresponds to the ``vmware_engine_network_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine + private clouds. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, vmware_engine_network, vmware_engine_network_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.CreateVmwareEngineNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.CreateVmwareEngineNetworkRequest): + request = vmwareengine.CreateVmwareEngineNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if vmware_engine_network is not None: + request.vmware_engine_network = vmware_engine_network + if vmware_engine_network_id is not None: + request.vmware_engine_network_id = vmware_engine_network_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_vmware_engine_network + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.VmwareEngineNetwork, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.UpdateVmwareEngineNetworkRequest, dict] + ] = None, + *, + vmware_engine_network: Optional[ + vmwareengine_resources.VmwareEngineNetwork + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies a VMware Engine network resource. Only the following + fields can be updated: ``description``. Only fields specified in + ``updateMask`` are applied. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_update_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() + vmware_engine_network.type_ = "STANDARD" + + request = vmwareengine_v1.UpdateVmwareEngineNetworkRequest( + vmware_engine_network=vmware_engine_network, + ) + + # Make the request + operation = client.update_vmware_engine_network(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.UpdateVmwareEngineNetworkRequest, dict]): + The request object. Request message for + [VmwareEngine.UpdateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.UpdateVmwareEngineNetwork] + vmware_engine_network (google.cloud.vmwareengine_v1.types.VmwareEngineNetwork): + Required. VMware Engine network + description. + + This corresponds to the ``vmware_engine_network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the VMware Engine network resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. Only the following fields can be updated: + ``description``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.VmwareEngineNetwork` VMware Engine network resource that provides connectivity for VMware Engine + private clouds. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([vmware_engine_network, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.UpdateVmwareEngineNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.UpdateVmwareEngineNetworkRequest): + request = vmwareengine.UpdateVmwareEngineNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vmware_engine_network is not None: + request.vmware_engine_network = vmware_engine_network + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_vmware_engine_network + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("vmware_engine_network.name", request.vmware_engine_network.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.VmwareEngineNetwork, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.DeleteVmwareEngineNetworkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``VmwareEngineNetwork`` resource. You can only delete + a VMware Engine network after all resources that refer to it are + deleted. For example, a private cloud, a network peering, and a + network policy can all refer to the same VMware Engine network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_delete_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteVmwareEngineNetworkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vmware_engine_network(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest, dict]): + The request object. Request message for + [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] + name (str): + Required. The resource name of the VMware Engine network + to be deleted. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.DeleteVmwareEngineNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.DeleteVmwareEngineNetworkRequest): + request = vmwareengine.DeleteVmwareEngineNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_vmware_engine_network + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def get_vmware_engine_network( + self, + request: Optional[ + Union[vmwareengine.GetVmwareEngineNetworkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.VmwareEngineNetwork: + r"""Retrieves a ``VmwareEngineNetwork`` resource by its resource + name. The resource contains details of the VMware Engine + network, such as its VMware Engine network type, peered networks + in a service project, and state (for example, ``CREATING``, + ``ACTIVE``, ``DELETING``). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_get_vmware_engine_network(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetVmwareEngineNetworkRequest( + name="name_value", + ) + + # Make the request + response = client.get_vmware_engine_network(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest, dict]): + The request object. Request message for + [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] + name (str): + Required. The resource name of the VMware Engine network + to retrieve. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.types.VmwareEngineNetwork: + VMware Engine network resource that + provides connectivity for VMware Engine + private clouds. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.GetVmwareEngineNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.GetVmwareEngineNetworkRequest): + request = vmwareengine.GetVmwareEngineNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_vmware_engine_network + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_vmware_engine_networks( + self, + request: Optional[ + Union[vmwareengine.ListVmwareEngineNetworksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVmwareEngineNetworksPager: + r"""Lists ``VmwareEngineNetwork`` resources in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_list_vmware_engine_networks(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListVmwareEngineNetworksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vmware_engine_networks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest, dict]): + The request object. Request message for + [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + parent (str): + Required. The resource name of the location to query for + VMware Engine networks. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: ``projects/my-project/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksPager: + Response message for + [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vmwareengine.ListVmwareEngineNetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vmwareengine.ListVmwareEngineNetworksRequest): + request = vmwareengine.ListVmwareEngineNetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_vmware_engine_networks + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVmwareEngineNetworksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_private_connection( + self, + request: Optional[ + Union[vmwareengine.CreatePrivateConnectionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[vmwareengine_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new private connection that can be used for + accessing private Clouds. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import vmwareengine_v1 + + def sample_create_private_connection(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + private_connection = vmwareengine_v1.PrivateConnection() + private_connection.vmware_engine_network = "vmware_engine_network_value" + private_connection.type_ = "THIRD_PARTY_SERVICE" + private_connection.service_network = "service_network_value" + + request = vmwareengine_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest, dict]): + The request object. Request message for + [VmwareEngine.CreatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection] + parent (str): + Required. The resource name of the location to create + the new private connection in. Private connection is a + regional resource. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (google.cloud.vmwareengine_v1.types.PrivateConnection): + Required. The initial description of + the new private connection. 
+ + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (str): + Required. The user-provided identifier of the new + private connection. This identifier must be unique among + private connection resources within the parent and + becomes the final token in the name URI. The identifier + must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and + hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) - This corresponds to the ``name`` field + This corresponds to the ``private_connection_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -4597,22 +9387,14 @@ def sample_delete_vmware_engine_network(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateConnection` Private connection resource that provides connectivity for VMware Engine + private clouds. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + has_flattened_params = any([parent, private_connection, private_connection_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4620,26 +9402,30 @@ def sample_delete_vmware_engine_network(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.DeleteVmwareEngineNetworkRequest. + # in a vmwareengine.CreatePrivateConnectionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.DeleteVmwareEngineNetworkRequest): - request = vmwareengine.DeleteVmwareEngineNetworkRequest(request) + if not isinstance(request, vmwareengine.CreatePrivateConnectionRequest): + request = vmwareengine.CreatePrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.delete_vmware_engine_network + self._transport.create_private_connection ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. 
@@ -4654,29 +9440,25 @@ def sample_delete_vmware_engine_network(): response = operation.from_gapic( response, self._transport.operations_client, - empty_pb2.Empty, + vmwareengine_resources.PrivateConnection, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def get_vmware_engine_network( + def get_private_connection( self, - request: Optional[ - Union[vmwareengine.GetVmwareEngineNetworkRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetPrivateConnectionRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.VmwareEngineNetwork: - r"""Retrieves a ``VmwareEngineNetwork`` resource by its resource - name. The resource contains details of the VMware Engine - network, such as its VMware Engine network type, peered networks - in a service project, and state (for example, ``CREATING``, - ``ACTIVE``, ``DELETING``). + ) -> vmwareengine_resources.PrivateConnection: + r"""Retrieves a ``PrivateConnection`` resource by its resource name. + The resource contains details of the private connection, such as + connected network, routing mode and state. .. 
code-block:: python @@ -4689,32 +9471,32 @@ def get_vmware_engine_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_get_vmware_engine_network(): + def sample_get_private_connection(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.GetVmwareEngineNetworkRequest( + request = vmwareengine_v1.GetPrivateConnectionRequest( name="name_value", ) # Make the request - response = client.get_vmware_engine_network(request=request) + response = client.get_private_connection(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest, dict]): The request object. Request message for - [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] + [VmwareEngine.GetPrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection] name (str): - Required. The resource name of the VMware Engine network - to retrieve. Resource names are schemeless URIs that - follow the conventions in + Required. The resource name of the private connection to + retrieve. Resource names are schemeless URIs that follow + the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/global/vmwareEngineNetworks/my-network`` + ``projects/my-project/locations/us-central1/privateConnections/my-connection`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -4726,8 +9508,8 @@ def sample_get_vmware_engine_network(): sent along with the request as metadata. 
Returns: - google.cloud.vmwareengine_v1.types.VmwareEngineNetwork: - VMware Engine network resource that + google.cloud.vmwareengine_v1.types.PrivateConnection: + Private connection resource that provides connectivity for VMware Engine private clouds. @@ -4743,11 +9525,11 @@ def sample_get_vmware_engine_network(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.GetVmwareEngineNetworkRequest. + # in a vmwareengine.GetPrivateConnectionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.GetVmwareEngineNetworkRequest): - request = vmwareengine.GetVmwareEngineNetworkRequest(request) + if not isinstance(request, vmwareengine.GetPrivateConnectionRequest): + request = vmwareengine.GetPrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -4755,9 +9537,7 @@ def sample_get_vmware_engine_network(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.get_vmware_engine_network - ] + rpc = self._transport._wrapped_methods[self._transport.get_private_connection] # Certain fields should be provided within the metadata header; # add these here. @@ -4776,18 +9556,18 @@ def sample_get_vmware_engine_network(): # Done; return the response. 
return response - def list_vmware_engine_networks( + def list_private_connections( self, request: Optional[ - Union[vmwareengine.ListVmwareEngineNetworksRequest, dict] + Union[vmwareengine.ListPrivateConnectionsRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListVmwareEngineNetworksPager: - r"""Lists ``VmwareEngineNetwork`` resources in a given project and + ) -> pagers.ListPrivateConnectionsPager: + r"""Lists ``PrivateConnection`` resources in a given project and location. .. code-block:: python @@ -4801,32 +9581,32 @@ def list_vmware_engine_networks( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_vmware_engine_networks(): + def sample_list_private_connections(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListVmwareEngineNetworksRequest( + request = vmwareengine_v1.ListPrivateConnectionsRequest( parent="parent_value", ) # Make the request - page_result = client.list_vmware_engine_networks(request=request) + page_result = client.list_private_connections(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest, dict]): The request object. Request message for - [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] parent (str): Required. The resource name of the location to query for - VMware Engine networks. 
Resource names are schemeless - URIs that follow the conventions in + private connections. Resource names are schemeless URIs + that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/global`` + example: ``projects/my-project/locations/us-central1`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -4838,9 +9618,9 @@ def sample_list_vmware_engine_networks(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsPager: Response message for - [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] Iterating over this object will yield results and resolve additional pages automatically. @@ -4857,11 +9637,11 @@ def sample_list_vmware_engine_networks(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListVmwareEngineNetworksRequest. + # in a vmwareengine.ListPrivateConnectionsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ListVmwareEngineNetworksRequest): - request = vmwareengine.ListVmwareEngineNetworksRequest(request) + if not isinstance(request, vmwareengine.ListPrivateConnectionsRequest): + request = vmwareengine.ListPrivateConnectionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -4869,9 +9649,7 @@ def sample_list_vmware_engine_networks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[ - self._transport.list_vmware_engine_networks - ] + rpc = self._transport._wrapped_methods[self._transport.list_private_connections] # Certain fields should be provided within the metadata header; # add these here. @@ -4889,7 +9667,7 @@ def sample_list_vmware_engine_networks(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListVmwareEngineNetworksPager( + response = pagers.ListPrivateConnectionsPager( method=rpc, request=request, response=response, @@ -4899,21 +9677,21 @@ def sample_list_vmware_engine_networks(): # Done; return the response. return response - def create_private_connection( + def update_private_connection( self, request: Optional[ - Union[vmwareengine.CreatePrivateConnectionRequest, dict] + Union[vmwareengine.UpdatePrivateConnectionRequest, dict] ] = None, *, - parent: Optional[str] = None, private_connection: Optional[vmwareengine_resources.PrivateConnection] = None, - private_connection_id: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates a new private connection that can be used for - accessing private Clouds. + r"""Modifies a ``PrivateConnection`` resource. Only ``description`` + and ``routing_mode`` fields can be updated. Only fields + specified in ``updateMask`` are applied. .. 
code-block:: python @@ -4926,7 +9704,7 @@ def create_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_create_private_connection(): + def sample_update_private_connection(): # Create a client client = vmwareengine_v1.VmwareEngineClient() @@ -4936,14 +9714,12 @@ def sample_create_private_connection(): private_connection.type_ = "THIRD_PARTY_SERVICE" private_connection.service_network = "service_network_value" - request = vmwareengine_v1.CreatePrivateConnectionRequest( - parent="parent_value", - private_connection_id="private_connection_id_value", + request = vmwareengine_v1.UpdatePrivateConnectionRequest( private_connection=private_connection, ) # Make the request - operation = client.create_private_connection(request=request) + operation = client.update_private_connection(request=request) print("Waiting for operation to complete...") @@ -4953,44 +9729,26 @@ def sample_create_private_connection(): print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.UpdatePrivateConnectionRequest, dict]): The request object. Request message for - [VmwareEngine.CreatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection] - parent (str): - Required. The resource name of the location to create - the new private connection in. Private connection is a - regional resource. Resource names are schemeless URIs - that follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
+ [VmwareEngine.UpdatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.UpdatePrivateConnection] private_connection (google.cloud.vmwareengine_v1.types.PrivateConnection): - Required. The initial description of - the new private connection. + Required. Private connection + description. This corresponds to the ``private_connection`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - private_connection_id (str): - Required. The user-provided identifier of the new - private connection. This identifier must be unique among - private connection resources within the parent and - becomes the final token in the name URI. The identifier - must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and - hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``PrivateConnection`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. - This corresponds to the ``private_connection_id`` field + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -5010,7 +9768,7 @@ def sample_create_private_connection(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, private_connection, private_connection_id]) + has_flattened_params = any([private_connection, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5018,30 +9776,30 @@ def sample_create_private_connection(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.CreatePrivateConnectionRequest. + # in a vmwareengine.UpdatePrivateConnectionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.CreatePrivateConnectionRequest): - request = vmwareengine.CreatePrivateConnectionRequest(request) + if not isinstance(request, vmwareengine.UpdatePrivateConnectionRequest): + request = vmwareengine.UpdatePrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent if private_connection is not None: request.private_connection = private_connection - if private_connection_id is not None: - request.private_connection_id = private_connection_id + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.create_private_connection + self._transport.update_private_connection ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata( + (("private_connection.name", request.private_connection.name),) + ), ) # Send the request. @@ -5063,18 +9821,20 @@ def sample_create_private_connection(): # Done; return the response. 
return response - def get_private_connection( + def delete_private_connection( self, - request: Optional[Union[vmwareengine.GetPrivateConnectionRequest, dict]] = None, + request: Optional[ + Union[vmwareengine.DeletePrivateConnectionRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.PrivateConnection: - r"""Retrieves a ``PrivateConnection`` resource by its resource name. - The resource contains details of the private connection, such as - connected network, routing mode and state. + ) -> operation.Operation: + r"""Deletes a ``PrivateConnection`` resource. When a private + connection is deleted for a VMware Engine network, the connected + network becomes inaccessible to that VMware Engine network. .. code-block:: python @@ -5087,29 +9847,33 @@ def get_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_get_private_connection(): + def sample_delete_private_connection(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.GetPrivateConnectionRequest( + request = vmwareengine_v1.DeletePrivateConnectionRequest( name="name_value", ) # Make the request - response = client.get_private_connection(request=request) + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest, dict]): The request object. 
Request message for - [VmwareEngine.GetPrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection] + [VmwareEngine.DeletePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection] name (str): Required. The resource name of the private connection to - retrieve. Resource names are schemeless URIs that follow - the conventions in + be deleted. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: ``projects/my-project/locations/us-central1/privateConnections/my-connection`` @@ -5124,10 +9888,19 @@ def sample_get_private_connection(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.types.PrivateConnection: - Private connection resource that - provides connectivity for VMware Engine - private clouds. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. @@ -5141,11 +9914,11 @@ def sample_get_private_connection(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.GetPrivateConnectionRequest. + # in a vmwareengine.DeletePrivateConnectionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, vmwareengine.GetPrivateConnectionRequest): - request = vmwareengine.GetPrivateConnectionRequest(request) + if not isinstance(request, vmwareengine.DeletePrivateConnectionRequest): + request = vmwareengine.DeletePrivateConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -5153,7 +9926,9 @@ def sample_get_private_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_private_connection] + rpc = self._transport._wrapped_methods[ + self._transport.delete_private_connection + ] # Certain fields should be provided within the metadata header; # add these here. @@ -5169,22 +9944,30 @@ def sample_get_private_connection(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=vmwareengine.OperationMetadata, + ) + # Done; return the response. return response - def list_private_connections( + def list_private_connection_peering_routes( self, request: Optional[ - Union[vmwareengine.ListPrivateConnectionsRequest, dict] + Union[vmwareengine.ListPrivateConnectionPeeringRoutesRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPrivateConnectionsPager: - r"""Lists ``PrivateConnection`` resources in a given project and - location. + ) -> pagers.ListPrivateConnectionPeeringRoutesPager: + r"""Lists the private connection routes exchanged over a + peering connection. .. 
code-block:: python @@ -5197,32 +9980,33 @@ def list_private_connections( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_private_connections(): + def sample_list_private_connection_peering_routes(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListPrivateConnectionsRequest( + request = vmwareengine_v1.ListPrivateConnectionPeeringRoutesRequest( parent="parent_value", ) # Make the request - page_result = client.list_private_connections(request=request) + page_result = client.list_private_connection_peering_routes(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest, dict]): The request object. Request message for - [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] + [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] parent (str): - Required. The resource name of the location to query for - private connections. Resource names are schemeless URIs - that follow the conventions in + Required. The resource name of the private connection to + retrieve peering routes from. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` + example: + ``projects/my-project/locations/us-west1/privateConnections/my-connection`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -5234,9 +10018,9 @@ def sample_list_private_connections(): sent along with the request as metadata. 
Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsPager: + google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesPager: Response message for - [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] + [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] Iterating over this object will yield results and resolve additional pages automatically. @@ -5253,11 +10037,13 @@ def sample_list_private_connections(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListPrivateConnectionsRequest. + # in a vmwareengine.ListPrivateConnectionPeeringRoutesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.ListPrivateConnectionsRequest): - request = vmwareengine.ListPrivateConnectionsRequest(request) + if not isinstance( + request, vmwareengine.ListPrivateConnectionPeeringRoutesRequest + ): + request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -5265,7 +10051,9 @@ def sample_list_private_connections(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_private_connections] + rpc = self._transport._wrapped_methods[ + self._transport.list_private_connection_peering_routes + ] # Certain fields should be provided within the metadata header; # add these here. @@ -5283,7 +10071,7 @@ def sample_list_private_connections(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. 
- response = pagers.ListPrivateConnectionsPager( + response = pagers.ListPrivateConnectionPeeringRoutesPager( method=rpc, request=request, response=response, @@ -5293,21 +10081,23 @@ def sample_list_private_connections(): # Done; return the response. return response - def update_private_connection( + def grant_dns_bind_permission( self, request: Optional[ - Union[vmwareengine.UpdatePrivateConnectionRequest, dict] + Union[vmwareengine.GrantDnsBindPermissionRequest, dict] ] = None, *, - private_connection: Optional[vmwareengine_resources.PrivateConnection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + name: Optional[str] = None, + principal: Optional[vmwareengine_resources.Principal] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Modifies a ``PrivateConnection`` resource. Only ``description`` - and ``routing_mode`` fields can be updated. Only fields - specified in ``updateMask`` are applied. + r"""Grants the bind permission to the customer provided + principal(user / service account) to bind their DNS zone + with the intranet VPC associated with the project. + DnsBindPermission is a global resource and location can + only be global. .. 
code-block:: python @@ -5320,22 +10110,21 @@ def update_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_update_private_connection(): + def sample_grant_dns_bind_permission(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - private_connection = vmwareengine_v1.PrivateConnection() - private_connection.vmware_engine_network = "vmware_engine_network_value" - private_connection.type_ = "THIRD_PARTY_SERVICE" - private_connection.service_network = "service_network_value" + principal = vmwareengine_v1.Principal() + principal.user = "user_value" - request = vmwareengine_v1.UpdatePrivateConnectionRequest( - private_connection=private_connection, + request = vmwareengine_v1.GrantDnsBindPermissionRequest( + name="name_value", + principal=principal, ) # Make the request - operation = client.update_private_connection(request=request) + operation = client.grant_dns_bind_permission(request=request) print("Waiting for operation to complete...") @@ -5345,26 +10134,30 @@ def sample_update_private_connection(): print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.UpdatePrivateConnectionRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GrantDnsBindPermissionRequest, dict]): The request object. Request message for - [VmwareEngine.UpdatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.UpdatePrivateConnection] - private_connection (google.cloud.vmwareengine_v1.types.PrivateConnection): - Required. Private connection - description. + [VmwareEngine.GrantDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GrantDnsBindPermission] + name (str): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. 
Resource names + are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/dnsBindPermission`` - This corresponds to the ``private_connection`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the ``PrivateConnection`` resource by the - update. The fields specified in the ``update_mask`` are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. - - This corresponds to the ``update_mask`` field + principal (google.cloud.vmwareengine_v1.types.Principal): + Required. The consumer provided + user/service account which needs to be + granted permission to bind with the + intranet VPC corresponding to the + consumer project. + + This corresponds to the ``principal`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -5377,14 +10170,15 @@ def sample_update_private_connection(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.PrivateConnection` Private connection resource that provides connectivity for VMware Engine - private clouds. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.DnsBindPermission` DnsBindPermission resource that contains the accounts having the consumer DNS + bind permission on the corresponding intranet VPC of + the consumer project. """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([private_connection, update_mask]) + has_flattened_params = any([name, principal]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5392,30 +10186,28 @@ def sample_update_private_connection(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.UpdatePrivateConnectionRequest. + # in a vmwareengine.GrantDnsBindPermissionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.UpdatePrivateConnectionRequest): - request = vmwareengine.UpdatePrivateConnectionRequest(request) + if not isinstance(request, vmwareengine.GrantDnsBindPermissionRequest): + request = vmwareengine.GrantDnsBindPermissionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if private_connection is not None: - request.private_connection = private_connection - if update_mask is not None: - request.update_mask = update_mask + if name is not None: + request.name = name + if principal is not None: + request.principal = principal # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.update_private_connection + self._transport.grant_dns_bind_permission ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("private_connection.name", request.private_connection.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. 
@@ -5430,27 +10222,26 @@ def sample_update_private_connection(): response = operation.from_gapic( response, self._transport.operations_client, - vmwareengine_resources.PrivateConnection, + vmwareengine_resources.DnsBindPermission, metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. return response - def delete_private_connection( + def get_dns_bind_permission( self, - request: Optional[ - Union[vmwareengine.DeletePrivateConnectionRequest, dict] - ] = None, + request: Optional[Union[vmwareengine.GetDnsBindPermissionRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a ``PrivateConnection`` resource. When a private - connection is deleted for a VMware Engine network, the connected - network becomes inaccessible to that VMware Engine network. + ) -> vmwareengine_resources.DnsBindPermission: + r"""Gets all the principals having bind permission on the + intranet VPC associated with the consumer project + granted by the Grant API. DnsBindPermission is a global + resource and location can only be global. .. 
code-block:: python @@ -5463,36 +10254,34 @@ def delete_private_connection( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_delete_private_connection(): + def sample_get_dns_bind_permission(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.DeletePrivateConnectionRequest( + request = vmwareengine_v1.GetDnsBindPermissionRequest( name="name_value", ) # Make the request - operation = client.delete_private_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + response = client.get_dns_bind_permission(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.GetDnsBindPermissionRequest, dict]): The request object. Request message for - [VmwareEngine.DeletePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection] + [VmwareEngine.GetDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsBindPermission] name (str): - Required. The resource name of the private connection to - be deleted. Resource names are schemeless URIs that - follow the conventions in + Required. The name of the resource which stores the + users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1/privateConnections/my-connection`` + ``projects/my-project/locations/global/dnsBindPermission`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -5504,19 +10293,12 @@ def sample_delete_private_connection(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } + google.cloud.vmwareengine_v1.types.DnsBindPermission: + DnsBindPermission resource that + contains the accounts having the + consumer DNS bind permission on the + corresponding intranet VPC of the + consumer project. """ # Create or coerce a protobuf request object. @@ -5530,11 +10312,11 @@ def sample_delete_private_connection(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.DeletePrivateConnectionRequest. + # in a vmwareengine.GetDnsBindPermissionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, vmwareengine.DeletePrivateConnectionRequest): - request = vmwareengine.DeletePrivateConnectionRequest(request) + if not isinstance(request, vmwareengine.GetDnsBindPermissionRequest): + request = vmwareengine.GetDnsBindPermissionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -5542,9 +10324,7 @@ def sample_delete_private_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[ - self._transport.delete_private_connection - ] + rpc = self._transport._wrapped_methods[self._transport.get_dns_bind_permission] # Certain fields should be provided within the metadata header; # add these here. @@ -5560,30 +10340,26 @@ def sample_delete_private_connection(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=vmwareengine.OperationMetadata, - ) - # Done; return the response. return response - def list_private_connection_peering_routes( + def revoke_dns_bind_permission( self, request: Optional[ - Union[vmwareengine.ListPrivateConnectionPeeringRoutesRequest, dict] + Union[vmwareengine.RevokeDnsBindPermissionRequest, dict] ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, + principal: Optional[vmwareengine_resources.Principal] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPrivateConnectionPeeringRoutesPager: - r"""Lists the private connection routes exchanged over a - peering connection. + ) -> operation.Operation: + r"""Revokes the bind permission from the customer + provided principal(user / service account) on the + intranet VPC associated with the consumer project. + DnsBindPermission is a global resource and location can + only be global. .. 
code-block:: python @@ -5596,35 +10372,54 @@ def list_private_connection_peering_routes( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import vmwareengine_v1 - def sample_list_private_connection_peering_routes(): + def sample_revoke_dns_bind_permission(): # Create a client client = vmwareengine_v1.VmwareEngineClient() # Initialize request argument(s) - request = vmwareengine_v1.ListPrivateConnectionPeeringRoutesRequest( - parent="parent_value", + principal = vmwareengine_v1.Principal() + principal.user = "user_value" + + request = vmwareengine_v1.RevokeDnsBindPermissionRequest( + name="name_value", + principal=principal, ) # Make the request - page_result = client.list_private_connection_peering_routes(request=request) + operation = client.revoke_dns_bind_permission(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest, dict]): + request (Union[google.cloud.vmwareengine_v1.types.RevokeDnsBindPermissionRequest, dict]): The request object. Request message for - [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] - parent (str): - Required. The resource name of the private connection to - retrieve peering routes from. Resource names are - schemeless URIs that follow the conventions in + [VmwareEngine.RevokeDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.RevokeDnsBindPermission] + name (str): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. 
Resource names + are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-west1/privateConnections/my-connection`` + ``projects/my-project/locations/global/dnsBindPermission`` - This corresponds to the ``parent`` field + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + principal (google.cloud.vmwareengine_v1.types.Principal): + Required. The consumer provided + user/service account which needs to be + granted permission to bind with the + intranet VPC corresponding to the + consumer project. + + This corresponds to the ``principal`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -5634,18 +10429,18 @@ def sample_list_private_connection_peering_routes(): sent along with the request as metadata. Returns: - google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesPager: - Response message for - [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] + google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be :class:`google.cloud.vmwareengine_v1.types.DnsBindPermission` DnsBindPermission resource that contains the accounts having the consumer DNS + bind permission on the corresponding intranet VPC of + the consumer project. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + has_flattened_params = any([name, principal]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5653,28 +10448,28 @@ def sample_list_private_connection_peering_routes(): ) # Minor optimization to avoid making a copy if the user passes - # in a vmwareengine.ListPrivateConnectionPeeringRoutesRequest. + # in a vmwareengine.RevokeDnsBindPermissionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance( - request, vmwareengine.ListPrivateConnectionPeeringRoutesRequest - ): - request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest(request) + if not isinstance(request, vmwareengine.RevokeDnsBindPermissionRequest): + request = vmwareengine.RevokeDnsBindPermissionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name + if principal is not None: + request.principal = principal # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.list_private_connection_peering_routes + self._transport.revoke_dns_bind_permission ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -5685,13 +10480,12 @@ def sample_list_private_connection_peering_routes(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListPrivateConnectionPeeringRoutesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vmwareengine_resources.DnsBindPermission, + metadata_type=vmwareengine.OperationMetadata, ) # Done; return the response. diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py index 3eb2fb488989..978b1cb2466c 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/pagers.py @@ -283,29 +283,933 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListNodesPager: + """A pager for iterating through ``list_nodes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListNodesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``nodes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNodes`` requests and continue to iterate + through the ``nodes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListNodesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListNodesResponse], + request: vmwareengine.ListNodesRequest, + response: vmwareengine.ListNodesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListNodesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListNodesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListNodesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListNodesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.Node]: + for page in self.pages: + yield from page.nodes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNodesAsyncPager: + """A pager for iterating through ``list_nodes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListNodesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``nodes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNodes`` requests and continue to iterate + through the ``nodes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListNodesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[vmwareengine.ListNodesResponse]], + request: vmwareengine.ListNodesRequest, + response: vmwareengine.ListNodesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListNodesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListNodesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListNodesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[vmwareengine.ListNodesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.Node]: + async def async_generator(): + async for page in self.pages: + for response in page.nodes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExternalAddressesPager: + """A pager for iterating through ``list_external_addresses`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListExternalAddressesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``external_addresses`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListExternalAddresses`` requests and continue to iterate + through the ``external_addresses`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListExternalAddressesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListExternalAddressesResponse], + request: vmwareengine.ListExternalAddressesRequest, + response: vmwareengine.ListExternalAddressesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListExternalAddressesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListExternalAddressesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vmwareengine.ListExternalAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListExternalAddressesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.ExternalAddress]: + for page in self.pages: + yield from page.external_addresses + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExternalAddressesAsyncPager: + """A pager for iterating through ``list_external_addresses`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListExternalAddressesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``external_addresses`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExternalAddresses`` requests and continue to iterate + through the ``external_addresses`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListExternalAddressesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[vmwareengine.ListExternalAddressesResponse]], + request: vmwareengine.ListExternalAddressesRequest, + response: vmwareengine.ListExternalAddressesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.vmwareengine_v1.types.ListExternalAddressesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListExternalAddressesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListExternalAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[vmwareengine.ListExternalAddressesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.ExternalAddress]: + async def async_generator(): + async for page in self.pages: + for response in page.external_addresses: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchNetworkPolicyExternalAddressesPager: + """A pager for iterating through ``fetch_network_policy_external_addresses`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``external_addresses`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchNetworkPolicyExternalAddresses`` requests and continue to iterate + through the ``external_addresses`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.FetchNetworkPolicyExternalAddressesResponse], + request: vmwareengine.FetchNetworkPolicyExternalAddressesRequest, + response: vmwareengine.FetchNetworkPolicyExternalAddressesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[vmwareengine.FetchNetworkPolicyExternalAddressesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.ExternalAddress]: + for page in self.pages: + yield from page.external_addresses + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchNetworkPolicyExternalAddressesAsyncPager: + """A pager for iterating through ``fetch_network_policy_external_addresses`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``external_addresses`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchNetworkPolicyExternalAddresses`` requests and continue to iterate + through the ``external_addresses`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[vmwareengine.FetchNetworkPolicyExternalAddressesResponse] + ], + request: vmwareengine.FetchNetworkPolicyExternalAddressesRequest, + response: vmwareengine.FetchNetworkPolicyExternalAddressesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vmwareengine.FetchNetworkPolicyExternalAddressesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.ExternalAddress]: + async def async_generator(): + async for page in self.pages: + for response in page.external_addresses: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListSubnetsPager: """A pager for iterating through ``list_subnets`` requests. This class thinly wraps an initial - :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` object, and + :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subnets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubnets`` requests and continue to iterate + through the ``subnets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListSubnetsResponse], + request: vmwareengine.ListSubnetsRequest, + response: vmwareengine.ListSubnetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListSubnetsRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListSubnetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListSubnetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListSubnetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.Subnet]: + for page in self.pages: + yield from page.subnets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubnetsAsyncPager: + """A pager for iterating through ``list_subnets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subnets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubnets`` requests and continue to iterate + through the ``subnets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[vmwareengine.ListSubnetsResponse]], + request: vmwareengine.ListSubnetsRequest, + response: vmwareengine.ListSubnetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListSubnetsRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListSubnetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListSubnetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[vmwareengine.ListSubnetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.Subnet]: + async def async_generator(): + async for page in self.pages: + for response in page.subnets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExternalAccessRulesPager: + """A pager for iterating through ``list_external_access_rules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListExternalAccessRulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``external_access_rules`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListExternalAccessRules`` requests and continue to iterate + through the ``external_access_rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListExternalAccessRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListExternalAccessRulesResponse], + request: vmwareengine.ListExternalAccessRulesRequest, + response: vmwareengine.ListExternalAccessRulesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListExternalAccessRulesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListExternalAccessRulesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vmwareengine.ListExternalAccessRulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListExternalAccessRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.ExternalAccessRule]: + for page in self.pages: + yield from page.external_access_rules + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExternalAccessRulesAsyncPager: + """A pager for iterating through ``list_external_access_rules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListExternalAccessRulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``external_access_rules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExternalAccessRules`` requests and continue to iterate + through the ``external_access_rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListExternalAccessRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[vmwareengine.ListExternalAccessRulesResponse]], + request: vmwareengine.ListExternalAccessRulesRequest, + response: vmwareengine.ListExternalAccessRulesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListExternalAccessRulesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListExternalAccessRulesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListExternalAccessRulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vmwareengine.ListExternalAccessRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.ExternalAccessRule]: + async def async_generator(): + async for page in self.pages: + for response in page.external_access_rules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLoggingServersPager: + """A pager for iterating through ``list_logging_servers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListLoggingServersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``logging_servers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLoggingServers`` requests and continue to iterate + through the ``logging_servers`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.vmwareengine_v1.types.ListLoggingServersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListLoggingServersResponse], + request: vmwareengine.ListLoggingServersRequest, + response: vmwareengine.ListLoggingServersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListLoggingServersRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListLoggingServersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListLoggingServersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListLoggingServersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.LoggingServer]: + for page in self.pages: + yield from page.logging_servers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLoggingServersAsyncPager: + """A pager for iterating through ``list_logging_servers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListLoggingServersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``logging_servers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLoggingServers`` requests and continue to iterate + through the ``logging_servers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListLoggingServersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[vmwareengine.ListLoggingServersResponse]], + request: vmwareengine.ListLoggingServersRequest, + response: vmwareengine.ListLoggingServersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListLoggingServersRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListLoggingServersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vmwareengine.ListLoggingServersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[vmwareengine.ListLoggingServersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.LoggingServer]: + async def async_generator(): + async for page in self.pages: + for response in page.logging_servers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNodeTypesPager: + """A pager for iterating through ``list_node_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``node_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNodeTypes`` requests and continue to iterate + through the ``node_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListNodeTypesResponse], + request: vmwareengine.ListNodeTypesRequest, + response: vmwareengine.ListNodeTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.vmwareengine_v1.types.ListNodeTypesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListNodeTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListNodeTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListNodeTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.NodeType]: + for page in self.pages: + yield from page.node_types + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNodeTypesAsyncPager: + """A pager for iterating through ``list_node_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``node_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNodeTypes`` requests and continue to iterate + through the ``node_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[vmwareengine.ListNodeTypesResponse]], + request: vmwareengine.ListNodeTypesRequest, + response: vmwareengine.ListNodeTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListNodeTypesRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListNodeTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListNodeTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[vmwareengine.ListNodeTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.NodeType]: + async def async_generator(): + async for page in self.pages: + for response in page.node_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNetworkPeeringsPager: + """A pager for iterating through ``list_network_peerings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListNetworkPeeringsResponse` object, and provides an ``__iter__`` method to iterate through its - ``subnets`` field. + ``network_peerings`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListSubnets`` requests and continue to iterate - through the ``subnets`` field on the + ``ListNetworkPeerings`` requests and continue to iterate + through the ``network_peerings`` field on the corresponding responses. - All the usual :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` + All the usual :class:`google.cloud.vmwareengine_v1.types.ListNetworkPeeringsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., vmwareengine.ListSubnetsResponse], - request: vmwareengine.ListSubnetsRequest, - response: vmwareengine.ListSubnetsResponse, + method: Callable[..., vmwareengine.ListNetworkPeeringsResponse], + request: vmwareengine.ListNetworkPeeringsRequest, + response: vmwareengine.ListNetworkPeeringsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -314,15 +1218,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.cloud.vmwareengine_v1.types.ListSubnetsRequest): + request (google.cloud.vmwareengine_v1.types.ListNetworkPeeringsRequest): The initial request object. - response (google.cloud.vmwareengine_v1.types.ListSubnetsResponse): + response (google.cloud.vmwareengine_v1.types.ListNetworkPeeringsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method - self._request = vmwareengine.ListSubnetsRequest(request) + self._request = vmwareengine.ListNetworkPeeringsRequest(request) self._response = response self._metadata = metadata @@ -330,44 +1234,44 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[vmwareengine.ListSubnetsResponse]: + def pages(self) -> Iterator[vmwareengine.ListNetworkPeeringsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine_resources.Subnet]: + def __iter__(self) -> Iterator[vmwareengine_resources.NetworkPeering]: for page in self.pages: - yield from page.subnets + yield from page.network_peerings def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListSubnetsAsyncPager: - """A pager for iterating through ``list_subnets`` requests. +class ListNetworkPeeringsAsyncPager: + """A pager for iterating through ``list_network_peerings`` requests. This class thinly wraps an initial - :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` object, and + :class:`google.cloud.vmwareengine_v1.types.ListNetworkPeeringsResponse` object, and provides an ``__aiter__`` method to iterate through its - ``subnets`` field. + ``network_peerings`` field. If there are more pages, the ``__aiter__`` method will make additional - ``ListSubnets`` requests and continue to iterate - through the ``subnets`` field on the + ``ListNetworkPeerings`` requests and continue to iterate + through the ``network_peerings`` field on the corresponding responses. - All the usual :class:`google.cloud.vmwareengine_v1.types.ListSubnetsResponse` + All the usual :class:`google.cloud.vmwareengine_v1.types.ListNetworkPeeringsResponse` attributes are available on the pager. 
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., Awaitable[vmwareengine.ListSubnetsResponse]], - request: vmwareengine.ListSubnetsRequest, - response: vmwareengine.ListSubnetsResponse, + method: Callable[..., Awaitable[vmwareengine.ListNetworkPeeringsResponse]], + request: vmwareengine.ListNetworkPeeringsRequest, + response: vmwareengine.ListNetworkPeeringsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -376,15 +1280,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.cloud.vmwareengine_v1.types.ListSubnetsRequest): + request (google.cloud.vmwareengine_v1.types.ListNetworkPeeringsRequest): The initial request object. - response (google.cloud.vmwareengine_v1.types.ListSubnetsResponse): + response (google.cloud.vmwareengine_v1.types.ListNetworkPeeringsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method - self._request = vmwareengine.ListSubnetsRequest(request) + self._request = vmwareengine.ListNetworkPeeringsRequest(request) self._response = response self._metadata = metadata @@ -392,17 +1296,17 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[vmwareengine.ListSubnetsResponse]: + async def pages(self) -> AsyncIterator[vmwareengine.ListNetworkPeeringsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine_resources.Subnet]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.NetworkPeering]: async def async_generator(): async for page in self.pages: - for response in page.subnets: + for response in page.network_peerings: yield response return async_generator() @@ -411,29 +1315,29 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListNodeTypesPager: - """A pager for iterating through ``list_node_types`` requests. +class ListPeeringRoutesPager: + """A pager for iterating through ``list_peering_routes`` requests. This class thinly wraps an initial - :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` object, and + :class:`google.cloud.vmwareengine_v1.types.ListPeeringRoutesResponse` object, and provides an ``__iter__`` method to iterate through its - ``node_types`` field. + ``peering_routes`` field. If there are more pages, the ``__iter__`` method will make additional - ``ListNodeTypes`` requests and continue to iterate - through the ``node_types`` field on the + ``ListPeeringRoutes`` requests and continue to iterate + through the ``peering_routes`` field on the corresponding responses. 
- All the usual :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` + All the usual :class:`google.cloud.vmwareengine_v1.types.ListPeeringRoutesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., vmwareengine.ListNodeTypesResponse], - request: vmwareengine.ListNodeTypesRequest, - response: vmwareengine.ListNodeTypesResponse, + method: Callable[..., vmwareengine.ListPeeringRoutesResponse], + request: vmwareengine.ListPeeringRoutesRequest, + response: vmwareengine.ListPeeringRoutesResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -442,15 +1346,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.cloud.vmwareengine_v1.types.ListNodeTypesRequest): + request (google.cloud.vmwareengine_v1.types.ListPeeringRoutesRequest): The initial request object. - response (google.cloud.vmwareengine_v1.types.ListNodeTypesResponse): + response (google.cloud.vmwareengine_v1.types.ListPeeringRoutesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method - self._request = vmwareengine.ListNodeTypesRequest(request) + self._request = vmwareengine.ListPeeringRoutesRequest(request) self._response = response self._metadata = metadata @@ -458,44 +1362,44 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[vmwareengine.ListNodeTypesResponse]: + def pages(self) -> Iterator[vmwareengine.ListPeeringRoutesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterator[vmwareengine_resources.NodeType]: + def __iter__(self) -> Iterator[vmwareengine_resources.PeeringRoute]: for page in self.pages: - yield from page.node_types + yield from page.peering_routes def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListNodeTypesAsyncPager: - """A pager for iterating through ``list_node_types`` requests. +class ListPeeringRoutesAsyncPager: + """A pager for iterating through ``list_peering_routes`` requests. This class thinly wraps an initial - :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` object, and + :class:`google.cloud.vmwareengine_v1.types.ListPeeringRoutesResponse` object, and provides an ``__aiter__`` method to iterate through its - ``node_types`` field. + ``peering_routes`` field. If there are more pages, the ``__aiter__`` method will make additional - ``ListNodeTypes`` requests and continue to iterate - through the ``node_types`` field on the + ``ListPeeringRoutes`` requests and continue to iterate + through the ``peering_routes`` field on the corresponding responses. - All the usual :class:`google.cloud.vmwareengine_v1.types.ListNodeTypesResponse` + All the usual :class:`google.cloud.vmwareengine_v1.types.ListPeeringRoutesResponse` attributes are available on the pager. 
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., Awaitable[vmwareengine.ListNodeTypesResponse]], - request: vmwareengine.ListNodeTypesRequest, - response: vmwareengine.ListNodeTypesResponse, + method: Callable[..., Awaitable[vmwareengine.ListPeeringRoutesResponse]], + request: vmwareengine.ListPeeringRoutesRequest, + response: vmwareengine.ListPeeringRoutesResponse, *, metadata: Sequence[Tuple[str, str]] = () ): @@ -504,15 +1408,15 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.cloud.vmwareengine_v1.types.ListNodeTypesRequest): + request (google.cloud.vmwareengine_v1.types.ListPeeringRoutesRequest): The initial request object. - response (google.cloud.vmwareengine_v1.types.ListNodeTypesResponse): + response (google.cloud.vmwareengine_v1.types.ListPeeringRoutesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method - self._request = vmwareengine.ListNodeTypesRequest(request) + self._request = vmwareengine.ListPeeringRoutesRequest(request) self._response = response self._metadata = metadata @@ -520,17 +1424,17 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[vmwareengine.ListNodeTypesResponse]: + async def pages(self) -> AsyncIterator[vmwareengine.ListPeeringRoutesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[vmwareengine_resources.NodeType]: + def __aiter__(self) -> AsyncIterator[vmwareengine_resources.PeeringRoute]: async def async_generator(): async for page in self.pages: - for response in page.node_types: + for response in page.peering_routes: yield response return async_generator() @@ -795,6 +1699,140 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListManagementDnsZoneBindingsPager: + """A pager for iterating through ``list_management_dns_zone_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``management_dns_zone_bindings`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListManagementDnsZoneBindings`` requests and continue to iterate + through the ``management_dns_zone_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., vmwareengine.ListManagementDnsZoneBindingsResponse], + request: vmwareengine.ListManagementDnsZoneBindingsRequest, + response: vmwareengine.ListManagementDnsZoneBindingsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = vmwareengine.ListManagementDnsZoneBindingsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vmwareengine.ListManagementDnsZoneBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vmwareengine_resources.ManagementDnsZoneBinding]: + for page in self.pages: + yield from page.management_dns_zone_bindings + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListManagementDnsZoneBindingsAsyncPager: + """A pager for iterating through ``list_management_dns_zone_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``management_dns_zone_bindings`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListManagementDnsZoneBindings`` requests and continue to iterate + through the ``management_dns_zone_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[vmwareengine.ListManagementDnsZoneBindingsResponse] + ], + request: vmwareengine.ListManagementDnsZoneBindingsRequest, + response: vmwareengine.ListManagementDnsZoneBindingsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsRequest): + The initial request object. + response (google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vmwareengine.ListManagementDnsZoneBindingsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vmwareengine.ListManagementDnsZoneBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[vmwareengine_resources.ManagementDnsZoneBinding]: + async def async_generator(): + async for page in self.pages: + for response in page.management_dns_zone_bindings: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListVmwareEngineNetworksPager: """A pager for iterating through ``list_vmware_engine_networks`` requests. 
diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py index 0aaf2c3af2be..115fdad0d19b 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/base.py @@ -217,6 +217,82 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_nodes: gapic_v1.method.wrap_method( + self.list_nodes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.get_node: gapic_v1.method.wrap_method( + self.get_node, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.list_external_addresses: gapic_v1.method.wrap_method( + self.list_external_addresses, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.fetch_network_policy_external_addresses: gapic_v1.method.wrap_method( + self.fetch_network_policy_external_addresses, + default_timeout=None, + client_info=client_info, + ), + self.get_external_address: gapic_v1.method.wrap_method( + self.get_external_address, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + 
deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.create_external_address: gapic_v1.method.wrap_method( + self.create_external_address, + default_timeout=None, + client_info=client_info, + ), + self.update_external_address: gapic_v1.method.wrap_method( + self.update_external_address, + default_timeout=None, + client_info=client_info, + ), + self.delete_external_address: gapic_v1.method.wrap_method( + self.delete_external_address, + default_timeout=None, + client_info=client_info, + ), self.list_subnets: gapic_v1.method.wrap_method( self.list_subnets, default_retry=retries.Retry( @@ -250,6 +326,92 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.list_external_access_rules: gapic_v1.method.wrap_method( + self.list_external_access_rules, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.get_external_access_rule: gapic_v1.method.wrap_method( + self.get_external_access_rule, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.create_external_access_rule: gapic_v1.method.wrap_method( + self.create_external_access_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_external_access_rule: gapic_v1.method.wrap_method( + self.update_external_access_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_external_access_rule: gapic_v1.method.wrap_method( + self.delete_external_access_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_logging_servers: gapic_v1.method.wrap_method( + self.list_logging_servers, + 
default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.get_logging_server: gapic_v1.method.wrap_method( + self.get_logging_server, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.create_logging_server: gapic_v1.method.wrap_method( + self.create_logging_server, + default_timeout=None, + client_info=client_info, + ), + self.update_logging_server: gapic_v1.method.wrap_method( + self.update_logging_server, + default_timeout=None, + client_info=client_info, + ), + self.delete_logging_server: gapic_v1.method.wrap_method( + self.delete_logging_server, + default_timeout=None, + client_info=client_info, + ), self.list_node_types: gapic_v1.method.wrap_method( self.list_node_types, default_retry=retries.Retry( @@ -316,6 +478,82 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_dns_forwarding: gapic_v1.method.wrap_method( + self.get_dns_forwarding, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.update_dns_forwarding: gapic_v1.method.wrap_method( + self.update_dns_forwarding, + default_timeout=None, + client_info=client_info, + ), + self.get_network_peering: gapic_v1.method.wrap_method( + self.get_network_peering, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + 
client_info=client_info, + ), + self.list_network_peerings: gapic_v1.method.wrap_method( + self.list_network_peerings, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.create_network_peering: gapic_v1.method.wrap_method( + self.create_network_peering, + default_timeout=None, + client_info=client_info, + ), + self.delete_network_peering: gapic_v1.method.wrap_method( + self.delete_network_peering, + default_timeout=None, + client_info=client_info, + ), + self.update_network_peering: gapic_v1.method.wrap_method( + self.update_network_peering, + default_timeout=None, + client_info=client_info, + ), + self.list_peering_routes: gapic_v1.method.wrap_method( + self.list_peering_routes, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), self.create_hcx_activation_key: gapic_v1.method.wrap_method( self.create_hcx_activation_key, default_timeout=None, @@ -392,6 +630,54 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_management_dns_zone_bindings: gapic_v1.method.wrap_method( + self.list_management_dns_zone_bindings, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.get_management_dns_zone_binding: gapic_v1.method.wrap_method( + self.get_management_dns_zone_binding, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + 
deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.create_management_dns_zone_binding: gapic_v1.method.wrap_method( + self.create_management_dns_zone_binding, + default_timeout=None, + client_info=client_info, + ), + self.update_management_dns_zone_binding: gapic_v1.method.wrap_method( + self.update_management_dns_zone_binding, + default_timeout=None, + client_info=client_info, + ), + self.delete_management_dns_zone_binding: gapic_v1.method.wrap_method( + self.delete_management_dns_zone_binding, + default_timeout=None, + client_info=client_info, + ), + self.repair_management_dns_zone_binding: gapic_v1.method.wrap_method( + self.repair_management_dns_zone_binding, + default_timeout=None, + client_info=client_info, + ), self.create_vmware_engine_network: gapic_v1.method.wrap_method( self.create_vmware_engine_network, default_timeout=None, @@ -492,6 +778,30 @@ def _prep_wrapped_messages(self, client_info): default_timeout=120.0, client_info=client_info, ), + self.grant_dns_bind_permission: gapic_v1.method.wrap_method( + self.grant_dns_bind_permission, + default_timeout=None, + client_info=client_info, + ), + self.get_dns_bind_permission: gapic_v1.method.wrap_method( + self.get_dns_bind_permission, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.revoke_dns_bind_permission: gapic_v1.method.wrap_method( + self.revoke_dns_bind_permission, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -618,6 +928,89 @@ def delete_cluster( ]: raise NotImplementedError() + @property + def list_nodes( + self, + ) -> Callable[ + [vmwareengine.ListNodesRequest], + Union[ + vmwareengine.ListNodesResponse, Awaitable[vmwareengine.ListNodesResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_node( + self, + ) -> 
Callable[ + [vmwareengine.GetNodeRequest], + Union[vmwareengine_resources.Node, Awaitable[vmwareengine_resources.Node]], + ]: + raise NotImplementedError() + + @property + def list_external_addresses( + self, + ) -> Callable[ + [vmwareengine.ListExternalAddressesRequest], + Union[ + vmwareengine.ListExternalAddressesResponse, + Awaitable[vmwareengine.ListExternalAddressesResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_network_policy_external_addresses( + self, + ) -> Callable[ + [vmwareengine.FetchNetworkPolicyExternalAddressesRequest], + Union[ + vmwareengine.FetchNetworkPolicyExternalAddressesResponse, + Awaitable[vmwareengine.FetchNetworkPolicyExternalAddressesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_external_address( + self, + ) -> Callable[ + [vmwareengine.GetExternalAddressRequest], + Union[ + vmwareengine_resources.ExternalAddress, + Awaitable[vmwareengine_resources.ExternalAddress], + ], + ]: + raise NotImplementedError() + + @property + def create_external_address( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAddressRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_external_address( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAddressRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_external_address( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAddressRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_subnets( self, @@ -648,6 +1041,108 @@ def update_subnet( ]: raise NotImplementedError() + @property + def list_external_access_rules( + self, + ) -> Callable[ + [vmwareengine.ListExternalAccessRulesRequest], + Union[ + vmwareengine.ListExternalAccessRulesResponse, + 
Awaitable[vmwareengine.ListExternalAccessRulesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.GetExternalAccessRuleRequest], + Union[ + vmwareengine_resources.ExternalAccessRule, + Awaitable[vmwareengine_resources.ExternalAccessRule], + ], + ]: + raise NotImplementedError() + + @property + def create_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAccessRuleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAccessRuleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAccessRuleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_logging_servers( + self, + ) -> Callable[ + [vmwareengine.ListLoggingServersRequest], + Union[ + vmwareengine.ListLoggingServersResponse, + Awaitable[vmwareengine.ListLoggingServersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_logging_server( + self, + ) -> Callable[ + [vmwareengine.GetLoggingServerRequest], + Union[ + vmwareengine_resources.LoggingServer, + Awaitable[vmwareengine_resources.LoggingServer], + ], + ]: + raise NotImplementedError() + + @property + def create_logging_server( + self, + ) -> Callable[ + [vmwareengine.CreateLoggingServerRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_logging_server( + self, + ) -> Callable[ + [vmwareengine.UpdateLoggingServerRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + + @property + def delete_logging_server( + self, + ) -> Callable[ + [vmwareengine.DeleteLoggingServerRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_node_types( self, @@ -713,6 +1208,90 @@ def reset_vcenter_credentials( ]: raise NotImplementedError() + @property + def get_dns_forwarding( + self, + ) -> Callable[ + [vmwareengine.GetDnsForwardingRequest], + Union[ + vmwareengine_resources.DnsForwarding, + Awaitable[vmwareengine_resources.DnsForwarding], + ], + ]: + raise NotImplementedError() + + @property + def update_dns_forwarding( + self, + ) -> Callable[ + [vmwareengine.UpdateDnsForwardingRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_network_peering( + self, + ) -> Callable[ + [vmwareengine.GetNetworkPeeringRequest], + Union[ + vmwareengine_resources.NetworkPeering, + Awaitable[vmwareengine_resources.NetworkPeering], + ], + ]: + raise NotImplementedError() + + @property + def list_network_peerings( + self, + ) -> Callable[ + [vmwareengine.ListNetworkPeeringsRequest], + Union[ + vmwareengine.ListNetworkPeeringsResponse, + Awaitable[vmwareengine.ListNetworkPeeringsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_network_peering( + self, + ) -> Callable[ + [vmwareengine.CreateNetworkPeeringRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_network_peering( + self, + ) -> Callable[ + [vmwareengine.DeleteNetworkPeeringRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_network_peering( + self, + ) -> Callable[ + [vmwareengine.UpdateNetworkPeeringRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + + @property + def list_peering_routes( + self, + ) -> Callable[ + [vmwareengine.ListPeeringRoutesRequest], + Union[ + vmwareengine.ListPeeringRoutesResponse, + Awaitable[vmwareengine.ListPeeringRoutesResponse], + ], + ]: + raise NotImplementedError() + @property def create_hcx_activation_key( self, @@ -797,6 +1376,66 @@ def delete_network_policy( ]: raise NotImplementedError() + @property + def list_management_dns_zone_bindings( + self, + ) -> Callable[ + [vmwareengine.ListManagementDnsZoneBindingsRequest], + Union[ + vmwareengine.ListManagementDnsZoneBindingsResponse, + Awaitable[vmwareengine.ListManagementDnsZoneBindingsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.GetManagementDnsZoneBindingRequest], + Union[ + vmwareengine_resources.ManagementDnsZoneBinding, + Awaitable[vmwareengine_resources.ManagementDnsZoneBinding], + ], + ]: + raise NotImplementedError() + + @property + def create_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.CreateManagementDnsZoneBindingRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.UpdateManagementDnsZoneBindingRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.DeleteManagementDnsZoneBindingRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def repair_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.RepairManagementDnsZoneBindingRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def 
create_vmware_engine_network( self, @@ -911,6 +1550,36 @@ def list_private_connection_peering_routes( ]: raise NotImplementedError() + @property + def grant_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GrantDnsBindPermissionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GetDnsBindPermissionRequest], + Union[ + vmwareengine_resources.DnsBindPermission, + Awaitable[vmwareengine_resources.DnsBindPermission], + ], + ]: + raise NotImplementedError() + + @property + def revoke_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.RevokeDnsBindPermissionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py index a01d293b94c3..962d2a012ea6 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc.py @@ -311,10 +311,10 @@ def create_private_cloud( r"""Return a callable for the create private cloud method over gRPC. Creates a new ``PrivateCloud`` resource in a given project and - location. Private clouds can only be created in zones, regional - private clouds are not supported. - - Creating a private cloud also creates a `management + location. Private clouds of type ``STANDARD`` and + ``TIME_LIMITED`` are zonal resources, ``STRETCHED`` private + clouds are regional. Creating a private cloud also creates a + `management cluster `__ for that private cloud. 
@@ -531,9 +531,8 @@ def update_cluster( ) -> Callable[[vmwareengine.UpdateClusterRequest], operations_pb2.Operation]: r"""Return a callable for the update cluster method over gRPC. - Modifies a ``Cluster`` resource. Only the following fields can - be updated: ``node_type_configs.*.node_count``. Only fields - specified in ``updateMask`` are applied. + Modifies a ``Cluster`` resource. Only fields specified in + ``updateMask`` are applied. During operation processing, the resource is temporarily in the ``ACTIVE`` state before the operation fully completes. For that @@ -587,6 +586,246 @@ def delete_cluster( ) return self._stubs["delete_cluster"] + @property + def list_nodes( + self, + ) -> Callable[[vmwareengine.ListNodesRequest], vmwareengine.ListNodesResponse]: + r"""Return a callable for the list nodes method over gRPC. + + Lists nodes in a given cluster. + + Returns: + Callable[[~.ListNodesRequest], + ~.ListNodesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_nodes" not in self._stubs: + self._stubs["list_nodes"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListNodes", + request_serializer=vmwareengine.ListNodesRequest.serialize, + response_deserializer=vmwareengine.ListNodesResponse.deserialize, + ) + return self._stubs["list_nodes"] + + @property + def get_node( + self, + ) -> Callable[[vmwareengine.GetNodeRequest], vmwareengine_resources.Node]: + r"""Return a callable for the get node method over gRPC. + + Gets details of a single node. + + Returns: + Callable[[~.GetNodeRequest], + ~.Node]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node" not in self._stubs: + self._stubs["get_node"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetNode", + request_serializer=vmwareengine.GetNodeRequest.serialize, + response_deserializer=vmwareengine_resources.Node.deserialize, + ) + return self._stubs["get_node"] + + @property + def list_external_addresses( + self, + ) -> Callable[ + [vmwareengine.ListExternalAddressesRequest], + vmwareengine.ListExternalAddressesResponse, + ]: + r"""Return a callable for the list external addresses method over gRPC. + + Lists external IP addresses assigned to VMware + workload VMs in a given private cloud. + + Returns: + Callable[[~.ListExternalAddressesRequest], + ~.ListExternalAddressesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_external_addresses" not in self._stubs: + self._stubs["list_external_addresses"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListExternalAddresses", + request_serializer=vmwareengine.ListExternalAddressesRequest.serialize, + response_deserializer=vmwareengine.ListExternalAddressesResponse.deserialize, + ) + return self._stubs["list_external_addresses"] + + @property + def fetch_network_policy_external_addresses( + self, + ) -> Callable[ + [vmwareengine.FetchNetworkPolicyExternalAddressesRequest], + vmwareengine.FetchNetworkPolicyExternalAddressesResponse, + ]: + r"""Return a callable for the fetch network policy external + addresses method over gRPC. + + Lists external IP addresses assigned to VMware + workload VMs within the scope of the given network + policy. 
+ + Returns: + Callable[[~.FetchNetworkPolicyExternalAddressesRequest], + ~.FetchNetworkPolicyExternalAddressesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_network_policy_external_addresses" not in self._stubs: + self._stubs[ + "fetch_network_policy_external_addresses" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/FetchNetworkPolicyExternalAddresses", + request_serializer=vmwareengine.FetchNetworkPolicyExternalAddressesRequest.serialize, + response_deserializer=vmwareengine.FetchNetworkPolicyExternalAddressesResponse.deserialize, + ) + return self._stubs["fetch_network_policy_external_addresses"] + + @property + def get_external_address( + self, + ) -> Callable[ + [vmwareengine.GetExternalAddressRequest], vmwareengine_resources.ExternalAddress + ]: + r"""Return a callable for the get external address method over gRPC. + + Gets details of a single external IP address. + + Returns: + Callable[[~.GetExternalAddressRequest], + ~.ExternalAddress]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_external_address" not in self._stubs: + self._stubs["get_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetExternalAddress", + request_serializer=vmwareengine.GetExternalAddressRequest.serialize, + response_deserializer=vmwareengine_resources.ExternalAddress.deserialize, + ) + return self._stubs["get_external_address"] + + @property + def create_external_address( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAddressRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create external address method over gRPC. + + Creates a new ``ExternalAddress`` resource in a given private + cloud. The network policy that corresponds to the private cloud + must have the external IP address network service enabled + (``NetworkPolicy.external_ip``). + + Returns: + Callable[[~.CreateExternalAddressRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_external_address" not in self._stubs: + self._stubs["create_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateExternalAddress", + request_serializer=vmwareengine.CreateExternalAddressRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_external_address"] + + @property + def update_external_address( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAddressRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update external address method over gRPC. + + Updates the parameters of a single external IP address. Only + fields specified in ``update_mask`` are applied. 
+ + During operation processing, the resource is temporarily in the + ``ACTIVE`` state before the operation fully completes. For that + period of time, you can't update the resource. Use the operation + status to determine when the processing fully completes. + + Returns: + Callable[[~.UpdateExternalAddressRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_external_address" not in self._stubs: + self._stubs["update_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateExternalAddress", + request_serializer=vmwareengine.UpdateExternalAddressRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_external_address"] + + @property + def delete_external_address( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAddressRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete external address method over gRPC. + + Deletes a single external IP address. When you delete + an external IP address, connectivity between the + external IP address and the corresponding internal IP + address is lost. + + Returns: + Callable[[~.DeleteExternalAddressRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_external_address" not in self._stubs: + self._stubs["delete_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteExternalAddress", + request_serializer=vmwareengine.DeleteExternalAddressRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_external_address"] + @property def list_subnets( self, @@ -648,12 +887,515 @@ def update_subnet( Updates the parameters of a single subnet. Only fields specified in ``update_mask`` are applied. - *Note*: This API is synchronous and always returns a successful - ``google.longrunning.Operation`` (LRO). The returned LRO will - only have ``done`` and ``response`` fields. + *Note*: This API is synchronous and always returns a successful + ``google.longrunning.Operation`` (LRO). The returned LRO will + only have ``done`` and ``response`` fields. + + Returns: + Callable[[~.UpdateSubnetRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_subnet" not in self._stubs: + self._stubs["update_subnet"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateSubnet", + request_serializer=vmwareengine.UpdateSubnetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_subnet"] + + @property + def list_external_access_rules( + self, + ) -> Callable[ + [vmwareengine.ListExternalAccessRulesRequest], + vmwareengine.ListExternalAccessRulesResponse, + ]: + r"""Return a callable for the list external access rules method over gRPC. + + Lists ``ExternalAccessRule`` resources in the specified network + policy. 
+ + Returns: + Callable[[~.ListExternalAccessRulesRequest], + ~.ListExternalAccessRulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_external_access_rules" not in self._stubs: + self._stubs["list_external_access_rules"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListExternalAccessRules", + request_serializer=vmwareengine.ListExternalAccessRulesRequest.serialize, + response_deserializer=vmwareengine.ListExternalAccessRulesResponse.deserialize, + ) + return self._stubs["list_external_access_rules"] + + @property + def get_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.GetExternalAccessRuleRequest], + vmwareengine_resources.ExternalAccessRule, + ]: + r"""Return a callable for the get external access rule method over gRPC. + + Gets details of a single external access rule. + + Returns: + Callable[[~.GetExternalAccessRuleRequest], + ~.ExternalAccessRule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_external_access_rule" not in self._stubs: + self._stubs["get_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetExternalAccessRule", + request_serializer=vmwareengine.GetExternalAccessRuleRequest.serialize, + response_deserializer=vmwareengine_resources.ExternalAccessRule.deserialize, + ) + return self._stubs["get_external_access_rule"] + + @property + def create_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAccessRuleRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create external access rule method over gRPC. + + Creates a new external access rule in a given network + policy. + + Returns: + Callable[[~.CreateExternalAccessRuleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_external_access_rule" not in self._stubs: + self._stubs["create_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateExternalAccessRule", + request_serializer=vmwareengine.CreateExternalAccessRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_external_access_rule"] + + @property + def update_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAccessRuleRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update external access rule method over gRPC. + + Updates the parameters of a single external access rule. Only + fields specified in ``update_mask`` are applied. + + Returns: + Callable[[~.UpdateExternalAccessRuleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_external_access_rule" not in self._stubs: + self._stubs["update_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateExternalAccessRule", + request_serializer=vmwareengine.UpdateExternalAccessRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_external_access_rule"] + + @property + def delete_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAccessRuleRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete external access rule method over gRPC. + + Deletes a single external access rule. + + Returns: + Callable[[~.DeleteExternalAccessRuleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_external_access_rule" not in self._stubs: + self._stubs["delete_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteExternalAccessRule", + request_serializer=vmwareengine.DeleteExternalAccessRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_external_access_rule"] + + @property + def list_logging_servers( + self, + ) -> Callable[ + [vmwareengine.ListLoggingServersRequest], + vmwareengine.ListLoggingServersResponse, + ]: + r"""Return a callable for the list logging servers method over gRPC. + + Lists logging servers configured for a given private + cloud. 
+ + Returns: + Callable[[~.ListLoggingServersRequest], + ~.ListLoggingServersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_logging_servers" not in self._stubs: + self._stubs["list_logging_servers"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListLoggingServers", + request_serializer=vmwareengine.ListLoggingServersRequest.serialize, + response_deserializer=vmwareengine.ListLoggingServersResponse.deserialize, + ) + return self._stubs["list_logging_servers"] + + @property + def get_logging_server( + self, + ) -> Callable[ + [vmwareengine.GetLoggingServerRequest], vmwareengine_resources.LoggingServer + ]: + r"""Return a callable for the get logging server method over gRPC. + + Gets details of a logging server. + + Returns: + Callable[[~.GetLoggingServerRequest], + ~.LoggingServer]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_logging_server" not in self._stubs: + self._stubs["get_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetLoggingServer", + request_serializer=vmwareengine.GetLoggingServerRequest.serialize, + response_deserializer=vmwareengine_resources.LoggingServer.deserialize, + ) + return self._stubs["get_logging_server"] + + @property + def create_logging_server( + self, + ) -> Callable[[vmwareengine.CreateLoggingServerRequest], operations_pb2.Operation]: + r"""Return a callable for the create logging server method over gRPC. + + Create a new logging server for a given private + cloud. 
+ + Returns: + Callable[[~.CreateLoggingServerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_logging_server" not in self._stubs: + self._stubs["create_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateLoggingServer", + request_serializer=vmwareengine.CreateLoggingServerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_logging_server"] + + @property + def update_logging_server( + self, + ) -> Callable[[vmwareengine.UpdateLoggingServerRequest], operations_pb2.Operation]: + r"""Return a callable for the update logging server method over gRPC. + + Updates the parameters of a single logging server. Only fields + specified in ``update_mask`` are applied. + + Returns: + Callable[[~.UpdateLoggingServerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_logging_server" not in self._stubs: + self._stubs["update_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateLoggingServer", + request_serializer=vmwareengine.UpdateLoggingServerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_logging_server"] + + @property + def delete_logging_server( + self, + ) -> Callable[[vmwareengine.DeleteLoggingServerRequest], operations_pb2.Operation]: + r"""Return a callable for the delete logging server method over gRPC. 
+ + Deletes a single logging server. + + Returns: + Callable[[~.DeleteLoggingServerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_logging_server" not in self._stubs: + self._stubs["delete_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteLoggingServer", + request_serializer=vmwareengine.DeleteLoggingServerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_logging_server"] + + @property + def list_node_types( + self, + ) -> Callable[ + [vmwareengine.ListNodeTypesRequest], vmwareengine.ListNodeTypesResponse + ]: + r"""Return a callable for the list node types method over gRPC. + + Lists node types + + Returns: + Callable[[~.ListNodeTypesRequest], + ~.ListNodeTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_node_types" not in self._stubs: + self._stubs["list_node_types"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListNodeTypes", + request_serializer=vmwareengine.ListNodeTypesRequest.serialize, + response_deserializer=vmwareengine.ListNodeTypesResponse.deserialize, + ) + return self._stubs["list_node_types"] + + @property + def get_node_type( + self, + ) -> Callable[[vmwareengine.GetNodeTypeRequest], vmwareengine_resources.NodeType]: + r"""Return a callable for the get node type method over gRPC. + + Gets details of a single ``NodeType``. 
+ + Returns: + Callable[[~.GetNodeTypeRequest], + ~.NodeType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node_type" not in self._stubs: + self._stubs["get_node_type"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetNodeType", + request_serializer=vmwareengine.GetNodeTypeRequest.serialize, + response_deserializer=vmwareengine_resources.NodeType.deserialize, + ) + return self._stubs["get_node_type"] + + @property + def show_nsx_credentials( + self, + ) -> Callable[ + [vmwareengine.ShowNsxCredentialsRequest], vmwareengine_resources.Credentials + ]: + r"""Return a callable for the show nsx credentials method over gRPC. + + Gets details of credentials for NSX appliance. + + Returns: + Callable[[~.ShowNsxCredentialsRequest], + ~.Credentials]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "show_nsx_credentials" not in self._stubs: + self._stubs["show_nsx_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ShowNsxCredentials", + request_serializer=vmwareengine.ShowNsxCredentialsRequest.serialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, + ) + return self._stubs["show_nsx_credentials"] + + @property + def show_vcenter_credentials( + self, + ) -> Callable[ + [vmwareengine.ShowVcenterCredentialsRequest], vmwareengine_resources.Credentials + ]: + r"""Return a callable for the show vcenter credentials method over gRPC. + + Gets details of credentials for Vcenter appliance. 
+ + Returns: + Callable[[~.ShowVcenterCredentialsRequest], + ~.Credentials]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "show_vcenter_credentials" not in self._stubs: + self._stubs["show_vcenter_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ShowVcenterCredentials", + request_serializer=vmwareengine.ShowVcenterCredentialsRequest.serialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, + ) + return self._stubs["show_vcenter_credentials"] + + @property + def reset_nsx_credentials( + self, + ) -> Callable[[vmwareengine.ResetNsxCredentialsRequest], operations_pb2.Operation]: + r"""Return a callable for the reset nsx credentials method over gRPC. + + Resets credentials of the NSX appliance. + + Returns: + Callable[[~.ResetNsxCredentialsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_nsx_credentials" not in self._stubs: + self._stubs["reset_nsx_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ResetNsxCredentials", + request_serializer=vmwareengine.ResetNsxCredentialsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_nsx_credentials"] + + @property + def reset_vcenter_credentials( + self, + ) -> Callable[ + [vmwareengine.ResetVcenterCredentialsRequest], operations_pb2.Operation + ]: + r"""Return a callable for the reset vcenter credentials method over gRPC. + + Resets credentials of the Vcenter appliance. 
+ + Returns: + Callable[[~.ResetVcenterCredentialsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_vcenter_credentials" not in self._stubs: + self._stubs["reset_vcenter_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ResetVcenterCredentials", + request_serializer=vmwareengine.ResetVcenterCredentialsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_vcenter_credentials"] + + @property + def get_dns_forwarding( + self, + ) -> Callable[ + [vmwareengine.GetDnsForwardingRequest], vmwareengine_resources.DnsForwarding + ]: + r"""Return a callable for the get dns forwarding method over gRPC. + + Gets details of the ``DnsForwarding`` config. + + Returns: + Callable[[~.GetDnsForwardingRequest], + ~.DnsForwarding]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_forwarding" not in self._stubs: + self._stubs["get_dns_forwarding"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetDnsForwarding", + request_serializer=vmwareengine.GetDnsForwardingRequest.serialize, + response_deserializer=vmwareengine_resources.DnsForwarding.deserialize, + ) + return self._stubs["get_dns_forwarding"] + + @property + def update_dns_forwarding( + self, + ) -> Callable[[vmwareengine.UpdateDnsForwardingRequest], operations_pb2.Operation]: + r"""Return a callable for the update dns forwarding method over gRPC. 
+ + Updates the parameters of the ``DnsForwarding`` config, like + associated domains. Only fields specified in ``update_mask`` are + applied. Returns: - Callable[[~.UpdateSubnetRequest], + Callable[[~.UpdateDnsForwardingRequest], ~.Operation]: A function that, when called, will call the underlying RPC on the server. @@ -662,27 +1404,31 @@ def update_subnet( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_subnet" not in self._stubs: - self._stubs["update_subnet"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateSubnet", - request_serializer=vmwareengine.UpdateSubnetRequest.serialize, + if "update_dns_forwarding" not in self._stubs: + self._stubs["update_dns_forwarding"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateDnsForwarding", + request_serializer=vmwareengine.UpdateDnsForwardingRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["update_subnet"] + return self._stubs["update_dns_forwarding"] @property - def list_node_types( + def get_network_peering( self, ) -> Callable[ - [vmwareengine.ListNodeTypesRequest], vmwareengine.ListNodeTypesResponse + [vmwareengine.GetNetworkPeeringRequest], vmwareengine_resources.NetworkPeering ]: - r"""Return a callable for the list node types method over gRPC. + r"""Return a callable for the get network peering method over gRPC. - Lists node types + Retrieves a ``NetworkPeering`` resource by its resource name. + The resource contains details of the network peering, such as + peered networks, import and export custom route configurations, + and peering state. NetworkPeering is a global resource and + location can only be global. 
Returns: - Callable[[~.ListNodeTypesRequest], - ~.ListNodeTypesResponse]: + Callable[[~.GetNetworkPeeringRequest], + ~.NetworkPeering]: A function that, when called, will call the underlying RPC on the server. """ @@ -690,25 +1436,30 @@ def list_node_types( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_node_types" not in self._stubs: - self._stubs["list_node_types"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ListNodeTypes", - request_serializer=vmwareengine.ListNodeTypesRequest.serialize, - response_deserializer=vmwareengine.ListNodeTypesResponse.deserialize, + if "get_network_peering" not in self._stubs: + self._stubs["get_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetNetworkPeering", + request_serializer=vmwareengine.GetNetworkPeeringRequest.serialize, + response_deserializer=vmwareengine_resources.NetworkPeering.deserialize, ) - return self._stubs["list_node_types"] + return self._stubs["get_network_peering"] @property - def get_node_type( + def list_network_peerings( self, - ) -> Callable[[vmwareengine.GetNodeTypeRequest], vmwareengine_resources.NodeType]: - r"""Return a callable for the get node type method over gRPC. + ) -> Callable[ + [vmwareengine.ListNetworkPeeringsRequest], + vmwareengine.ListNetworkPeeringsResponse, + ]: + r"""Return a callable for the list network peerings method over gRPC. - Gets details of a single ``NodeType``. + Lists ``NetworkPeering`` resources in a given project. + NetworkPeering is a global resource and location can only be + global. Returns: - Callable[[~.GetNodeTypeRequest], - ~.NodeType]: + Callable[[~.ListNetworkPeeringsRequest], + ~.ListNetworkPeeringsResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -716,27 +1467,28 @@ def get_node_type( # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_node_type" not in self._stubs: - self._stubs["get_node_type"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/GetNodeType", - request_serializer=vmwareengine.GetNodeTypeRequest.serialize, - response_deserializer=vmwareengine_resources.NodeType.deserialize, + if "list_network_peerings" not in self._stubs: + self._stubs["list_network_peerings"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListNetworkPeerings", + request_serializer=vmwareengine.ListNetworkPeeringsRequest.serialize, + response_deserializer=vmwareengine.ListNetworkPeeringsResponse.deserialize, ) - return self._stubs["get_node_type"] + return self._stubs["list_network_peerings"] @property - def show_nsx_credentials( + def create_network_peering( self, - ) -> Callable[ - [vmwareengine.ShowNsxCredentialsRequest], vmwareengine_resources.Credentials - ]: - r"""Return a callable for the show nsx credentials method over gRPC. + ) -> Callable[[vmwareengine.CreateNetworkPeeringRequest], operations_pb2.Operation]: + r"""Return a callable for the create network peering method over gRPC. - Gets details of credentials for NSX appliance. + Creates a new network peering between the peer network and + VMware Engine network provided in a ``NetworkPeering`` resource. + NetworkPeering is a global resource and location can only be + global. Returns: - Callable[[~.ShowNsxCredentialsRequest], - ~.Credentials]: + Callable[[~.CreateNetworkPeeringRequest], + ~.Operation]: A function that, when called, will call the underlying RPC on the server. """ @@ -744,27 +1496,28 @@ def show_nsx_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "show_nsx_credentials" not in self._stubs: - self._stubs["show_nsx_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ShowNsxCredentials", - request_serializer=vmwareengine.ShowNsxCredentialsRequest.serialize, - response_deserializer=vmwareengine_resources.Credentials.deserialize, + if "create_network_peering" not in self._stubs: + self._stubs["create_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateNetworkPeering", + request_serializer=vmwareengine.CreateNetworkPeeringRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["show_nsx_credentials"] + return self._stubs["create_network_peering"] @property - def show_vcenter_credentials( + def delete_network_peering( self, - ) -> Callable[ - [vmwareengine.ShowVcenterCredentialsRequest], vmwareengine_resources.Credentials - ]: - r"""Return a callable for the show vcenter credentials method over gRPC. + ) -> Callable[[vmwareengine.DeleteNetworkPeeringRequest], operations_pb2.Operation]: + r"""Return a callable for the delete network peering method over gRPC. - Gets details of credentials for Vcenter appliance. + Deletes a ``NetworkPeering`` resource. When a network peering is + deleted for a VMware Engine network, the peer network becomes + inaccessible to that VMware Engine network. NetworkPeering is a + global resource and location can only be global. Returns: - Callable[[~.ShowVcenterCredentialsRequest], - ~.Credentials]: + Callable[[~.DeleteNetworkPeeringRequest], + ~.Operation]: A function that, when called, will call the underlying RPC on the server. """ @@ -772,24 +1525,27 @@ def show_vcenter_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "show_vcenter_credentials" not in self._stubs: - self._stubs["show_vcenter_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ShowVcenterCredentials", - request_serializer=vmwareengine.ShowVcenterCredentialsRequest.serialize, - response_deserializer=vmwareengine_resources.Credentials.deserialize, + if "delete_network_peering" not in self._stubs: + self._stubs["delete_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteNetworkPeering", + request_serializer=vmwareengine.DeleteNetworkPeeringRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["show_vcenter_credentials"] + return self._stubs["delete_network_peering"] @property - def reset_nsx_credentials( + def update_network_peering( self, - ) -> Callable[[vmwareengine.ResetNsxCredentialsRequest], operations_pb2.Operation]: - r"""Return a callable for the reset nsx credentials method over gRPC. + ) -> Callable[[vmwareengine.UpdateNetworkPeeringRequest], operations_pb2.Operation]: + r"""Return a callable for the update network peering method over gRPC. - Resets credentials of the NSX appliance. + Modifies a ``NetworkPeering`` resource. Only the ``description`` + field can be updated. Only fields specified in ``updateMask`` + are applied. NetworkPeering is a global resource and location + can only be global. Returns: - Callable[[~.ResetNsxCredentialsRequest], + Callable[[~.UpdateNetworkPeeringRequest], ~.Operation]: A function that, when called, will call the underlying RPC on the server. @@ -798,27 +1554,29 @@ def reset_nsx_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "reset_nsx_credentials" not in self._stubs: - self._stubs["reset_nsx_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ResetNsxCredentials", - request_serializer=vmwareengine.ResetNsxCredentialsRequest.serialize, + if "update_network_peering" not in self._stubs: + self._stubs["update_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateNetworkPeering", + request_serializer=vmwareengine.UpdateNetworkPeeringRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["reset_nsx_credentials"] + return self._stubs["update_network_peering"] @property - def reset_vcenter_credentials( + def list_peering_routes( self, ) -> Callable[ - [vmwareengine.ResetVcenterCredentialsRequest], operations_pb2.Operation + [vmwareengine.ListPeeringRoutesRequest], vmwareengine.ListPeeringRoutesResponse ]: - r"""Return a callable for the reset vcenter credentials method over gRPC. + r"""Return a callable for the list peering routes method over gRPC. - Resets credentials of the Vcenter appliance. + Lists the network peering routes exchanged over a + peering connection. NetworkPeering is a global resource + and location can only be global. Returns: - Callable[[~.ResetVcenterCredentialsRequest], - ~.Operation]: + Callable[[~.ListPeeringRoutesRequest], + ~.ListPeeringRoutesResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -826,13 +1584,13 @@ def reset_vcenter_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "reset_vcenter_credentials" not in self._stubs: - self._stubs["reset_vcenter_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ResetVcenterCredentials", - request_serializer=vmwareengine.ResetVcenterCredentialsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + if "list_peering_routes" not in self._stubs: + self._stubs["list_peering_routes"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListPeeringRoutes", + request_serializer=vmwareengine.ListPeeringRoutesRequest.serialize, + response_deserializer=vmwareengine.ListPeeringRoutesResponse.deserialize, ) - return self._stubs["reset_vcenter_credentials"] + return self._stubs["list_peering_routes"] @property def create_hcx_activation_key( @@ -1074,6 +1832,208 @@ def delete_network_policy( ) return self._stubs["delete_network_policy"] + @property + def list_management_dns_zone_bindings( + self, + ) -> Callable[ + [vmwareengine.ListManagementDnsZoneBindingsRequest], + vmwareengine.ListManagementDnsZoneBindingsResponse, + ]: + r"""Return a callable for the list management dns zone + bindings method over gRPC. + + Lists Consumer VPCs bound to Management DNS Zone of a + given private cloud. + + Returns: + Callable[[~.ListManagementDnsZoneBindingsRequest], + ~.ListManagementDnsZoneBindingsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_management_dns_zone_bindings" not in self._stubs: + self._stubs[ + "list_management_dns_zone_bindings" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListManagementDnsZoneBindings", + request_serializer=vmwareengine.ListManagementDnsZoneBindingsRequest.serialize, + response_deserializer=vmwareengine.ListManagementDnsZoneBindingsResponse.deserialize, + ) + return self._stubs["list_management_dns_zone_bindings"] + + @property + def get_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.GetManagementDnsZoneBindingRequest], + vmwareengine_resources.ManagementDnsZoneBinding, + ]: + r"""Return a callable for the get management dns zone + binding method over gRPC. + + Retrieves a 'ManagementDnsZoneBinding' resource by + its resource name. + + Returns: + Callable[[~.GetManagementDnsZoneBindingRequest], + ~.ManagementDnsZoneBinding]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "get_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetManagementDnsZoneBinding", + request_serializer=vmwareengine.GetManagementDnsZoneBindingRequest.serialize, + response_deserializer=vmwareengine_resources.ManagementDnsZoneBinding.deserialize, + ) + return self._stubs["get_management_dns_zone_binding"] + + @property + def create_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.CreateManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create management dns zone + binding method over gRPC. + + Creates a new ``ManagementDnsZoneBinding`` resource in a private + cloud. 
This RPC creates the DNS binding and the resource that + represents the DNS binding of the consumer VPC network to the + management DNS zone. A management DNS zone is the Cloud DNS + cross-project binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP addresses + for the private cloud's ESXi hosts and management VM appliances + like vCenter and NSX Manager. + + Returns: + Callable[[~.CreateManagementDnsZoneBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "create_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateManagementDnsZoneBinding", + request_serializer=vmwareengine.CreateManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_management_dns_zone_binding"] + + @property + def update_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.UpdateManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update management dns zone + binding method over gRPC. + + Updates a ``ManagementDnsZoneBinding`` resource. Only fields + specified in ``update_mask`` are applied. + + Returns: + Callable[[~.UpdateManagementDnsZoneBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "update_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateManagementDnsZoneBinding", + request_serializer=vmwareengine.UpdateManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_management_dns_zone_binding"] + + @property + def delete_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.DeleteManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete management dns zone + binding method over gRPC. + + Deletes a ``ManagementDnsZoneBinding`` resource. When a + management DNS zone binding is deleted, the corresponding + consumer VPC network is no longer bound to the management DNS + zone. + + Returns: + Callable[[~.DeleteManagementDnsZoneBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "delete_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteManagementDnsZoneBinding", + request_serializer=vmwareengine.DeleteManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_management_dns_zone_binding"] + + @property + def repair_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.RepairManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + r"""Return a callable for the repair management dns zone + binding method over gRPC. 
+ + Retries to create a ``ManagementDnsZoneBinding`` resource that + is in failed state. + + Returns: + Callable[[~.RepairManagementDnsZoneBindingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "repair_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "repair_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/RepairManagementDnsZoneBinding", + request_serializer=vmwareengine.RepairManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["repair_management_dns_zone_binding"] + @property def create_vmware_engine_network( self, @@ -1410,6 +2370,102 @@ def list_private_connection_peering_routes( ) return self._stubs["list_private_connection_peering_routes"] + @property + def grant_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GrantDnsBindPermissionRequest], operations_pb2.Operation + ]: + r"""Return a callable for the grant dns bind permission method over gRPC. + + Grants the bind permission to the customer provided + principal(user / service account) to bind their DNS zone + with the intranet VPC associated with the project. + DnsBindPermission is a global resource and location can + only be global. + + Returns: + Callable[[~.GrantDnsBindPermissionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "grant_dns_bind_permission" not in self._stubs: + self._stubs["grant_dns_bind_permission"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GrantDnsBindPermission", + request_serializer=vmwareengine.GrantDnsBindPermissionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["grant_dns_bind_permission"] + + @property + def get_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GetDnsBindPermissionRequest], + vmwareengine_resources.DnsBindPermission, + ]: + r"""Return a callable for the get dns bind permission method over gRPC. + + Gets all the principals having bind permission on the + intranet VPC associated with the consumer project + granted by the Grant API. DnsBindPermission is a global + resource and location can only be global. + + Returns: + Callable[[~.GetDnsBindPermissionRequest], + ~.DnsBindPermission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_bind_permission" not in self._stubs: + self._stubs["get_dns_bind_permission"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetDnsBindPermission", + request_serializer=vmwareengine.GetDnsBindPermissionRequest.serialize, + response_deserializer=vmwareengine_resources.DnsBindPermission.deserialize, + ) + return self._stubs["get_dns_bind_permission"] + + @property + def revoke_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.RevokeDnsBindPermissionRequest], operations_pb2.Operation + ]: + r"""Return a callable for the revoke dns bind permission method over gRPC. + + Revokes the bind permission from the customer + provided principal(user / service account) on the + intranet VPC associated with the consumer project. 
+ DnsBindPermission is a global resource and location can + only be global. + + Returns: + Callable[[~.RevokeDnsBindPermissionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "revoke_dns_bind_permission" not in self._stubs: + self._stubs["revoke_dns_bind_permission"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/RevokeDnsBindPermission", + request_serializer=vmwareengine.RevokeDnsBindPermissionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["revoke_dns_bind_permission"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py index fb4a05660414..4777e1060d92 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/grpc_asyncio.py @@ -320,10 +320,10 @@ def create_private_cloud( r"""Return a callable for the create private cloud method over gRPC. Creates a new ``PrivateCloud`` resource in a given project and - location. Private clouds can only be created in zones, regional - private clouds are not supported. - - Creating a private cloud also creates a `management + location. Private clouds of type ``STANDARD`` and + ``TIME_LIMITED`` are zonal resources, ``STRETCHED`` private + clouds are regional. Creating a private cloud also creates a + `management cluster `__ for that private cloud. 
@@ -552,9 +552,8 @@ def update_cluster( ]: r"""Return a callable for the update cluster method over gRPC. - Modifies a ``Cluster`` resource. Only the following fields can - be updated: ``node_type_configs.*.node_count``. Only fields - specified in ``updateMask`` are applied. + Modifies a ``Cluster`` resource. Only fields specified in + ``updateMask`` are applied. During operation processing, the resource is temporarily in the ``ACTIVE`` state before the operation fully completes. For that @@ -610,6 +609,251 @@ def delete_cluster( ) return self._stubs["delete_cluster"] + @property + def list_nodes( + self, + ) -> Callable[ + [vmwareengine.ListNodesRequest], Awaitable[vmwareengine.ListNodesResponse] + ]: + r"""Return a callable for the list nodes method over gRPC. + + Lists nodes in a given cluster. + + Returns: + Callable[[~.ListNodesRequest], + Awaitable[~.ListNodesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_nodes" not in self._stubs: + self._stubs["list_nodes"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListNodes", + request_serializer=vmwareengine.ListNodesRequest.serialize, + response_deserializer=vmwareengine.ListNodesResponse.deserialize, + ) + return self._stubs["list_nodes"] + + @property + def get_node( + self, + ) -> Callable[ + [vmwareengine.GetNodeRequest], Awaitable[vmwareengine_resources.Node] + ]: + r"""Return a callable for the get node method over gRPC. + + Gets details of a single node. + + Returns: + Callable[[~.GetNodeRequest], + Awaitable[~.Node]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node" not in self._stubs: + self._stubs["get_node"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetNode", + request_serializer=vmwareengine.GetNodeRequest.serialize, + response_deserializer=vmwareengine_resources.Node.deserialize, + ) + return self._stubs["get_node"] + + @property + def list_external_addresses( + self, + ) -> Callable[ + [vmwareengine.ListExternalAddressesRequest], + Awaitable[vmwareengine.ListExternalAddressesResponse], + ]: + r"""Return a callable for the list external addresses method over gRPC. + + Lists external IP addresses assigned to VMware + workload VMs in a given private cloud. + + Returns: + Callable[[~.ListExternalAddressesRequest], + Awaitable[~.ListExternalAddressesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_external_addresses" not in self._stubs: + self._stubs["list_external_addresses"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListExternalAddresses", + request_serializer=vmwareengine.ListExternalAddressesRequest.serialize, + response_deserializer=vmwareengine.ListExternalAddressesResponse.deserialize, + ) + return self._stubs["list_external_addresses"] + + @property + def fetch_network_policy_external_addresses( + self, + ) -> Callable[ + [vmwareengine.FetchNetworkPolicyExternalAddressesRequest], + Awaitable[vmwareengine.FetchNetworkPolicyExternalAddressesResponse], + ]: + r"""Return a callable for the fetch network policy external + addresses method over gRPC. + + Lists external IP addresses assigned to VMware + workload VMs within the scope of the given network + policy. 
+ + Returns: + Callable[[~.FetchNetworkPolicyExternalAddressesRequest], + Awaitable[~.FetchNetworkPolicyExternalAddressesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_network_policy_external_addresses" not in self._stubs: + self._stubs[ + "fetch_network_policy_external_addresses" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/FetchNetworkPolicyExternalAddresses", + request_serializer=vmwareengine.FetchNetworkPolicyExternalAddressesRequest.serialize, + response_deserializer=vmwareengine.FetchNetworkPolicyExternalAddressesResponse.deserialize, + ) + return self._stubs["fetch_network_policy_external_addresses"] + + @property + def get_external_address( + self, + ) -> Callable[ + [vmwareengine.GetExternalAddressRequest], + Awaitable[vmwareengine_resources.ExternalAddress], + ]: + r"""Return a callable for the get external address method over gRPC. + + Gets details of a single external IP address. + + Returns: + Callable[[~.GetExternalAddressRequest], + Awaitable[~.ExternalAddress]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_external_address" not in self._stubs: + self._stubs["get_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetExternalAddress", + request_serializer=vmwareengine.GetExternalAddressRequest.serialize, + response_deserializer=vmwareengine_resources.ExternalAddress.deserialize, + ) + return self._stubs["get_external_address"] + + @property + def create_external_address( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAddressRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create external address method over gRPC. + + Creates a new ``ExternalAddress`` resource in a given private + cloud. The network policy that corresponds to the private cloud + must have the external IP address network service enabled + (``NetworkPolicy.external_ip``). + + Returns: + Callable[[~.CreateExternalAddressRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_external_address" not in self._stubs: + self._stubs["create_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateExternalAddress", + request_serializer=vmwareengine.CreateExternalAddressRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_external_address"] + + @property + def update_external_address( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAddressRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update external address method over gRPC. + + Updates the parameters of a single external IP address. Only + fields specified in ``update_mask`` are applied. 
+ + During operation processing, the resource is temporarily in the + ``ACTIVE`` state before the operation fully completes. For that + period of time, you can't update the resource. Use the operation + status to determine when the processing fully completes. + + Returns: + Callable[[~.UpdateExternalAddressRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_external_address" not in self._stubs: + self._stubs["update_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateExternalAddress", + request_serializer=vmwareengine.UpdateExternalAddressRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_external_address"] + + @property + def delete_external_address( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAddressRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete external address method over gRPC. + + Deletes a single external IP address. When you delete + an external IP address, connectivity between the + external IP address and the corresponding internal IP + address is lost. + + Returns: + Callable[[~.DeleteExternalAddressRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_external_address" not in self._stubs: + self._stubs["delete_external_address"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteExternalAddress", + request_serializer=vmwareengine.DeleteExternalAddressRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_external_address"] + @property def list_subnets( self, @@ -691,28 +935,556 @@ def update_subnet( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_subnet" not in self._stubs: - self._stubs["update_subnet"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateSubnet", - request_serializer=vmwareengine.UpdateSubnetRequest.serialize, + if "update_subnet" not in self._stubs: + self._stubs["update_subnet"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateSubnet", + request_serializer=vmwareengine.UpdateSubnetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_subnet"] + + @property + def list_external_access_rules( + self, + ) -> Callable[ + [vmwareengine.ListExternalAccessRulesRequest], + Awaitable[vmwareengine.ListExternalAccessRulesResponse], + ]: + r"""Return a callable for the list external access rules method over gRPC. + + Lists ``ExternalAccessRule`` resources in the specified network + policy. + + Returns: + Callable[[~.ListExternalAccessRulesRequest], + Awaitable[~.ListExternalAccessRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_external_access_rules" not in self._stubs: + self._stubs["list_external_access_rules"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListExternalAccessRules", + request_serializer=vmwareengine.ListExternalAccessRulesRequest.serialize, + response_deserializer=vmwareengine.ListExternalAccessRulesResponse.deserialize, + ) + return self._stubs["list_external_access_rules"] + + @property + def get_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.GetExternalAccessRuleRequest], + Awaitable[vmwareengine_resources.ExternalAccessRule], + ]: + r"""Return a callable for the get external access rule method over gRPC. + + Gets details of a single external access rule. + + Returns: + Callable[[~.GetExternalAccessRuleRequest], + Awaitable[~.ExternalAccessRule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_external_access_rule" not in self._stubs: + self._stubs["get_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetExternalAccessRule", + request_serializer=vmwareengine.GetExternalAccessRuleRequest.serialize, + response_deserializer=vmwareengine_resources.ExternalAccessRule.deserialize, + ) + return self._stubs["get_external_access_rule"] + + @property + def create_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAccessRuleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create external access rule method over gRPC. + + Creates a new external access rule in a given network + policy. + + Returns: + Callable[[~.CreateExternalAccessRuleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_external_access_rule" not in self._stubs: + self._stubs["create_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateExternalAccessRule", + request_serializer=vmwareengine.CreateExternalAccessRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_external_access_rule"] + + @property + def update_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAccessRuleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update external access rule method over gRPC. + + Updates the parameters of a single external access rule. Only + fields specified in ``update_mask`` are applied. + + Returns: + Callable[[~.UpdateExternalAccessRuleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_external_access_rule" not in self._stubs: + self._stubs["update_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateExternalAccessRule", + request_serializer=vmwareengine.UpdateExternalAccessRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_external_access_rule"] + + @property + def delete_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAccessRuleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete external access rule method over gRPC. 
+ + Deletes a single external access rule. + + Returns: + Callable[[~.DeleteExternalAccessRuleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_external_access_rule" not in self._stubs: + self._stubs["delete_external_access_rule"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteExternalAccessRule", + request_serializer=vmwareengine.DeleteExternalAccessRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_external_access_rule"] + + @property + def list_logging_servers( + self, + ) -> Callable[ + [vmwareengine.ListLoggingServersRequest], + Awaitable[vmwareengine.ListLoggingServersResponse], + ]: + r"""Return a callable for the list logging servers method over gRPC. + + Lists logging servers configured for a given private + cloud. + + Returns: + Callable[[~.ListLoggingServersRequest], + Awaitable[~.ListLoggingServersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_logging_servers" not in self._stubs: + self._stubs["list_logging_servers"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListLoggingServers", + request_serializer=vmwareengine.ListLoggingServersRequest.serialize, + response_deserializer=vmwareengine.ListLoggingServersResponse.deserialize, + ) + return self._stubs["list_logging_servers"] + + @property + def get_logging_server( + self, + ) -> Callable[ + [vmwareengine.GetLoggingServerRequest], + Awaitable[vmwareengine_resources.LoggingServer], + ]: + r"""Return a callable for the get logging server method over gRPC. + + Gets details of a logging server. + + Returns: + Callable[[~.GetLoggingServerRequest], + Awaitable[~.LoggingServer]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_logging_server" not in self._stubs: + self._stubs["get_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetLoggingServer", + request_serializer=vmwareengine.GetLoggingServerRequest.serialize, + response_deserializer=vmwareengine_resources.LoggingServer.deserialize, + ) + return self._stubs["get_logging_server"] + + @property + def create_logging_server( + self, + ) -> Callable[ + [vmwareengine.CreateLoggingServerRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create logging server method over gRPC. + + Create a new logging server for a given private + cloud. + + Returns: + Callable[[~.CreateLoggingServerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_logging_server" not in self._stubs: + self._stubs["create_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateLoggingServer", + request_serializer=vmwareengine.CreateLoggingServerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_logging_server"] + + @property + def update_logging_server( + self, + ) -> Callable[ + [vmwareengine.UpdateLoggingServerRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update logging server method over gRPC. + + Updates the parameters of a single logging server. Only fields + specified in ``update_mask`` are applied. + + Returns: + Callable[[~.UpdateLoggingServerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_logging_server" not in self._stubs: + self._stubs["update_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateLoggingServer", + request_serializer=vmwareengine.UpdateLoggingServerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_logging_server"] + + @property + def delete_logging_server( + self, + ) -> Callable[ + [vmwareengine.DeleteLoggingServerRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete logging server method over gRPC. + + Deletes a single logging server. + + Returns: + Callable[[~.DeleteLoggingServerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_logging_server" not in self._stubs: + self._stubs["delete_logging_server"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteLoggingServer", + request_serializer=vmwareengine.DeleteLoggingServerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_logging_server"] + + @property + def list_node_types( + self, + ) -> Callable[ + [vmwareengine.ListNodeTypesRequest], + Awaitable[vmwareengine.ListNodeTypesResponse], + ]: + r"""Return a callable for the list node types method over gRPC. + + Lists node types + + Returns: + Callable[[~.ListNodeTypesRequest], + Awaitable[~.ListNodeTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_node_types" not in self._stubs: + self._stubs["list_node_types"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListNodeTypes", + request_serializer=vmwareengine.ListNodeTypesRequest.serialize, + response_deserializer=vmwareengine.ListNodeTypesResponse.deserialize, + ) + return self._stubs["list_node_types"] + + @property + def get_node_type( + self, + ) -> Callable[ + [vmwareengine.GetNodeTypeRequest], Awaitable[vmwareengine_resources.NodeType] + ]: + r"""Return a callable for the get node type method over gRPC. + + Gets details of a single ``NodeType``. + + Returns: + Callable[[~.GetNodeTypeRequest], + Awaitable[~.NodeType]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node_type" not in self._stubs: + self._stubs["get_node_type"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetNodeType", + request_serializer=vmwareengine.GetNodeTypeRequest.serialize, + response_deserializer=vmwareengine_resources.NodeType.deserialize, + ) + return self._stubs["get_node_type"] + + @property + def show_nsx_credentials( + self, + ) -> Callable[ + [vmwareengine.ShowNsxCredentialsRequest], + Awaitable[vmwareengine_resources.Credentials], + ]: + r"""Return a callable for the show nsx credentials method over gRPC. + + Gets details of credentials for NSX appliance. + + Returns: + Callable[[~.ShowNsxCredentialsRequest], + Awaitable[~.Credentials]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "show_nsx_credentials" not in self._stubs: + self._stubs["show_nsx_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ShowNsxCredentials", + request_serializer=vmwareengine.ShowNsxCredentialsRequest.serialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, + ) + return self._stubs["show_nsx_credentials"] + + @property + def show_vcenter_credentials( + self, + ) -> Callable[ + [vmwareengine.ShowVcenterCredentialsRequest], + Awaitable[vmwareengine_resources.Credentials], + ]: + r"""Return a callable for the show vcenter credentials method over gRPC. + + Gets details of credentials for Vcenter appliance. 
+ + Returns: + Callable[[~.ShowVcenterCredentialsRequest], + Awaitable[~.Credentials]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "show_vcenter_credentials" not in self._stubs: + self._stubs["show_vcenter_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ShowVcenterCredentials", + request_serializer=vmwareengine.ShowVcenterCredentialsRequest.serialize, + response_deserializer=vmwareengine_resources.Credentials.deserialize, + ) + return self._stubs["show_vcenter_credentials"] + + @property + def reset_nsx_credentials( + self, + ) -> Callable[ + [vmwareengine.ResetNsxCredentialsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the reset nsx credentials method over gRPC. + + Resets credentials of the NSX appliance. + + Returns: + Callable[[~.ResetNsxCredentialsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reset_nsx_credentials" not in self._stubs: + self._stubs["reset_nsx_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ResetNsxCredentials", + request_serializer=vmwareengine.ResetNsxCredentialsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_nsx_credentials"] + + @property + def reset_vcenter_credentials( + self, + ) -> Callable[ + [vmwareengine.ResetVcenterCredentialsRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the reset vcenter credentials method over gRPC. + + Resets credentials of the Vcenter appliance. + + Returns: + Callable[[~.ResetVcenterCredentialsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_vcenter_credentials" not in self._stubs: + self._stubs["reset_vcenter_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ResetVcenterCredentials", + request_serializer=vmwareengine.ResetVcenterCredentialsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_vcenter_credentials"] + + @property + def get_dns_forwarding( + self, + ) -> Callable[ + [vmwareengine.GetDnsForwardingRequest], + Awaitable[vmwareengine_resources.DnsForwarding], + ]: + r"""Return a callable for the get dns forwarding method over gRPC. + + Gets details of the ``DnsForwarding`` config. + + Returns: + Callable[[~.GetDnsForwardingRequest], + Awaitable[~.DnsForwarding]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_forwarding" not in self._stubs: + self._stubs["get_dns_forwarding"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetDnsForwarding", + request_serializer=vmwareengine.GetDnsForwardingRequest.serialize, + response_deserializer=vmwareengine_resources.DnsForwarding.deserialize, + ) + return self._stubs["get_dns_forwarding"] + + @property + def update_dns_forwarding( + self, + ) -> Callable[ + [vmwareengine.UpdateDnsForwardingRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update dns forwarding method over gRPC. + + Updates the parameters of the ``DnsForwarding`` config, like + associated domains. Only fields specified in ``update_mask`` are + applied. + + Returns: + Callable[[~.UpdateDnsForwardingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dns_forwarding" not in self._stubs: + self._stubs["update_dns_forwarding"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateDnsForwarding", + request_serializer=vmwareengine.UpdateDnsForwardingRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["update_subnet"] + return self._stubs["update_dns_forwarding"] @property - def list_node_types( + def get_network_peering( self, ) -> Callable[ - [vmwareengine.ListNodeTypesRequest], - Awaitable[vmwareengine.ListNodeTypesResponse], + [vmwareengine.GetNetworkPeeringRequest], + Awaitable[vmwareengine_resources.NetworkPeering], ]: - r"""Return a callable for the list node types method over gRPC. 
+ r"""Return a callable for the get network peering method over gRPC. - Lists node types + Retrieves a ``NetworkPeering`` resource by its resource name. + The resource contains details of the network peering, such as + peered networks, import and export custom route configurations, + and peering state. NetworkPeering is a global resource and + location can only be global. Returns: - Callable[[~.ListNodeTypesRequest], - Awaitable[~.ListNodeTypesResponse]]: + Callable[[~.GetNetworkPeeringRequest], + Awaitable[~.NetworkPeering]]: A function that, when called, will call the underlying RPC on the server. """ @@ -720,27 +1492,30 @@ def list_node_types( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_node_types" not in self._stubs: - self._stubs["list_node_types"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ListNodeTypes", - request_serializer=vmwareengine.ListNodeTypesRequest.serialize, - response_deserializer=vmwareengine.ListNodeTypesResponse.deserialize, + if "get_network_peering" not in self._stubs: + self._stubs["get_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetNetworkPeering", + request_serializer=vmwareengine.GetNetworkPeeringRequest.serialize, + response_deserializer=vmwareengine_resources.NetworkPeering.deserialize, ) - return self._stubs["list_node_types"] + return self._stubs["get_network_peering"] @property - def get_node_type( + def list_network_peerings( self, ) -> Callable[ - [vmwareengine.GetNodeTypeRequest], Awaitable[vmwareengine_resources.NodeType] + [vmwareengine.ListNetworkPeeringsRequest], + Awaitable[vmwareengine.ListNetworkPeeringsResponse], ]: - r"""Return a callable for the get node type method over gRPC. + r"""Return a callable for the list network peerings method over gRPC. - Gets details of a single ``NodeType``. + Lists ``NetworkPeering`` resources in a given project. 
+ NetworkPeering is a global resource and location can only be + global. Returns: - Callable[[~.GetNodeTypeRequest], - Awaitable[~.NodeType]]: + Callable[[~.ListNetworkPeeringsRequest], + Awaitable[~.ListNetworkPeeringsResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -748,28 +1523,30 @@ def get_node_type( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_node_type" not in self._stubs: - self._stubs["get_node_type"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/GetNodeType", - request_serializer=vmwareengine.GetNodeTypeRequest.serialize, - response_deserializer=vmwareengine_resources.NodeType.deserialize, + if "list_network_peerings" not in self._stubs: + self._stubs["list_network_peerings"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListNetworkPeerings", + request_serializer=vmwareengine.ListNetworkPeeringsRequest.serialize, + response_deserializer=vmwareengine.ListNetworkPeeringsResponse.deserialize, ) - return self._stubs["get_node_type"] + return self._stubs["list_network_peerings"] @property - def show_nsx_credentials( + def create_network_peering( self, ) -> Callable[ - [vmwareengine.ShowNsxCredentialsRequest], - Awaitable[vmwareengine_resources.Credentials], + [vmwareengine.CreateNetworkPeeringRequest], Awaitable[operations_pb2.Operation] ]: - r"""Return a callable for the show nsx credentials method over gRPC. + r"""Return a callable for the create network peering method over gRPC. - Gets details of credentials for NSX appliance. + Creates a new network peering between the peer network and + VMware Engine network provided in a ``NetworkPeering`` resource. + NetworkPeering is a global resource and location can only be + global. 
Returns: - Callable[[~.ShowNsxCredentialsRequest], - Awaitable[~.Credentials]]: + Callable[[~.CreateNetworkPeeringRequest], + Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. """ @@ -777,28 +1554,30 @@ def show_nsx_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "show_nsx_credentials" not in self._stubs: - self._stubs["show_nsx_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ShowNsxCredentials", - request_serializer=vmwareengine.ShowNsxCredentialsRequest.serialize, - response_deserializer=vmwareengine_resources.Credentials.deserialize, + if "create_network_peering" not in self._stubs: + self._stubs["create_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateNetworkPeering", + request_serializer=vmwareengine.CreateNetworkPeeringRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["show_nsx_credentials"] + return self._stubs["create_network_peering"] @property - def show_vcenter_credentials( + def delete_network_peering( self, ) -> Callable[ - [vmwareengine.ShowVcenterCredentialsRequest], - Awaitable[vmwareengine_resources.Credentials], + [vmwareengine.DeleteNetworkPeeringRequest], Awaitable[operations_pb2.Operation] ]: - r"""Return a callable for the show vcenter credentials method over gRPC. + r"""Return a callable for the delete network peering method over gRPC. - Gets details of credentials for Vcenter appliance. + Deletes a ``NetworkPeering`` resource. When a network peering is + deleted for a VMware Engine network, the peer network becomes + inaccessible to that VMware Engine network. NetworkPeering is a + global resource and location can only be global. 
Returns: - Callable[[~.ShowVcenterCredentialsRequest], - Awaitable[~.Credentials]]: + Callable[[~.DeleteNetworkPeeringRequest], + Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. """ @@ -806,26 +1585,29 @@ def show_vcenter_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "show_vcenter_credentials" not in self._stubs: - self._stubs["show_vcenter_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ShowVcenterCredentials", - request_serializer=vmwareengine.ShowVcenterCredentialsRequest.serialize, - response_deserializer=vmwareengine_resources.Credentials.deserialize, + if "delete_network_peering" not in self._stubs: + self._stubs["delete_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteNetworkPeering", + request_serializer=vmwareengine.DeleteNetworkPeeringRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["show_vcenter_credentials"] + return self._stubs["delete_network_peering"] @property - def reset_nsx_credentials( + def update_network_peering( self, ) -> Callable[ - [vmwareengine.ResetNsxCredentialsRequest], Awaitable[operations_pb2.Operation] + [vmwareengine.UpdateNetworkPeeringRequest], Awaitable[operations_pb2.Operation] ]: - r"""Return a callable for the reset nsx credentials method over gRPC. + r"""Return a callable for the update network peering method over gRPC. - Resets credentials of the NSX appliance. + Modifies a ``NetworkPeering`` resource. Only the ``description`` + field can be updated. Only fields specified in ``updateMask`` + are applied. NetworkPeering is a global resource and location + can only be global. 
Returns: - Callable[[~.ResetNsxCredentialsRequest], + Callable[[~.UpdateNetworkPeeringRequest], Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. @@ -834,28 +1616,30 @@ def reset_nsx_credentials( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "reset_nsx_credentials" not in self._stubs: - self._stubs["reset_nsx_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ResetNsxCredentials", - request_serializer=vmwareengine.ResetNsxCredentialsRequest.serialize, + if "update_network_peering" not in self._stubs: + self._stubs["update_network_peering"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateNetworkPeering", + request_serializer=vmwareengine.UpdateNetworkPeeringRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["reset_nsx_credentials"] + return self._stubs["update_network_peering"] @property - def reset_vcenter_credentials( + def list_peering_routes( self, ) -> Callable[ - [vmwareengine.ResetVcenterCredentialsRequest], - Awaitable[operations_pb2.Operation], + [vmwareengine.ListPeeringRoutesRequest], + Awaitable[vmwareengine.ListPeeringRoutesResponse], ]: - r"""Return a callable for the reset vcenter credentials method over gRPC. + r"""Return a callable for the list peering routes method over gRPC. - Resets credentials of the Vcenter appliance. + Lists the network peering routes exchanged over a + peering connection. NetworkPeering is a global resource + and location can only be global. Returns: - Callable[[~.ResetVcenterCredentialsRequest], - Awaitable[~.Operation]]: + Callable[[~.ListPeeringRoutesRequest], + Awaitable[~.ListPeeringRoutesResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -863,13 +1647,13 @@ def reset_vcenter_credentials( # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "reset_vcenter_credentials" not in self._stubs: - self._stubs["reset_vcenter_credentials"] = self.grpc_channel.unary_unary( - "/google.cloud.vmwareengine.v1.VmwareEngine/ResetVcenterCredentials", - request_serializer=vmwareengine.ResetVcenterCredentialsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + if "list_peering_routes" not in self._stubs: + self._stubs["list_peering_routes"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListPeeringRoutes", + request_serializer=vmwareengine.ListPeeringRoutesRequest.serialize, + response_deserializer=vmwareengine.ListPeeringRoutesResponse.deserialize, ) - return self._stubs["reset_vcenter_credentials"] + return self._stubs["list_peering_routes"] @property def create_hcx_activation_key( @@ -1119,6 +1903,212 @@ def delete_network_policy( ) return self._stubs["delete_network_policy"] + @property + def list_management_dns_zone_bindings( + self, + ) -> Callable[ + [vmwareengine.ListManagementDnsZoneBindingsRequest], + Awaitable[vmwareengine.ListManagementDnsZoneBindingsResponse], + ]: + r"""Return a callable for the list management dns zone + bindings method over gRPC. + + Lists Consumer VPCs bound to Management DNS Zone of a + given private cloud. + + Returns: + Callable[[~.ListManagementDnsZoneBindingsRequest], + Awaitable[~.ListManagementDnsZoneBindingsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_management_dns_zone_bindings" not in self._stubs: + self._stubs[ + "list_management_dns_zone_bindings" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/ListManagementDnsZoneBindings", + request_serializer=vmwareengine.ListManagementDnsZoneBindingsRequest.serialize, + response_deserializer=vmwareengine.ListManagementDnsZoneBindingsResponse.deserialize, + ) + return self._stubs["list_management_dns_zone_bindings"] + + @property + def get_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.GetManagementDnsZoneBindingRequest], + Awaitable[vmwareengine_resources.ManagementDnsZoneBinding], + ]: + r"""Return a callable for the get management dns zone + binding method over gRPC. + + Retrieves a 'ManagementDnsZoneBinding' resource by + its resource name. + + Returns: + Callable[[~.GetManagementDnsZoneBindingRequest], + Awaitable[~.ManagementDnsZoneBinding]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "get_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetManagementDnsZoneBinding", + request_serializer=vmwareengine.GetManagementDnsZoneBindingRequest.serialize, + response_deserializer=vmwareengine_resources.ManagementDnsZoneBinding.deserialize, + ) + return self._stubs["get_management_dns_zone_binding"] + + @property + def create_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.CreateManagementDnsZoneBindingRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create management dns zone + binding method over gRPC. + + Creates a new ``ManagementDnsZoneBinding`` resource in a private + cloud. 
This RPC creates the DNS binding and the resource that + represents the DNS binding of the consumer VPC network to the + management DNS zone. A management DNS zone is the Cloud DNS + cross-project binding zone that VMware Engine creates for each + private cloud. It contains FQDNs and corresponding IP addresses + for the private cloud's ESXi hosts and management VM appliances + like vCenter and NSX Manager. + + Returns: + Callable[[~.CreateManagementDnsZoneBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "create_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/CreateManagementDnsZoneBinding", + request_serializer=vmwareengine.CreateManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_management_dns_zone_binding"] + + @property + def update_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.UpdateManagementDnsZoneBindingRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update management dns zone + binding method over gRPC. + + Updates a ``ManagementDnsZoneBinding`` resource. Only fields + specified in ``update_mask`` are applied. + + Returns: + Callable[[~.UpdateManagementDnsZoneBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "update_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/UpdateManagementDnsZoneBinding", + request_serializer=vmwareengine.UpdateManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_management_dns_zone_binding"] + + @property + def delete_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.DeleteManagementDnsZoneBindingRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete management dns zone + binding method over gRPC. + + Deletes a ``ManagementDnsZoneBinding`` resource. When a + management DNS zone binding is deleted, the corresponding + consumer VPC network is no longer bound to the management DNS + zone. + + Returns: + Callable[[~.DeleteManagementDnsZoneBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "delete_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/DeleteManagementDnsZoneBinding", + request_serializer=vmwareengine.DeleteManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_management_dns_zone_binding"] + + @property + def repair_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.RepairManagementDnsZoneBindingRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the repair management dns zone + binding method over gRPC. 
+ + Retries to create a ``ManagementDnsZoneBinding`` resource that + is in failed state. + + Returns: + Callable[[~.RepairManagementDnsZoneBindingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "repair_management_dns_zone_binding" not in self._stubs: + self._stubs[ + "repair_management_dns_zone_binding" + ] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/RepairManagementDnsZoneBinding", + request_serializer=vmwareengine.RepairManagementDnsZoneBindingRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["repair_management_dns_zone_binding"] + @property def create_vmware_engine_network( self, @@ -1461,6 +2451,104 @@ def list_private_connection_peering_routes( ) return self._stubs["list_private_connection_peering_routes"] + @property + def grant_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GrantDnsBindPermissionRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the grant dns bind permission method over gRPC. + + Grants the bind permission to the customer provided + principal(user / service account) to bind their DNS zone + with the intranet VPC associated with the project. + DnsBindPermission is a global resource and location can + only be global. + + Returns: + Callable[[~.GrantDnsBindPermissionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "grant_dns_bind_permission" not in self._stubs: + self._stubs["grant_dns_bind_permission"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GrantDnsBindPermission", + request_serializer=vmwareengine.GrantDnsBindPermissionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["grant_dns_bind_permission"] + + @property + def get_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GetDnsBindPermissionRequest], + Awaitable[vmwareengine_resources.DnsBindPermission], + ]: + r"""Return a callable for the get dns bind permission method over gRPC. + + Gets all the principals having bind permission on the + intranet VPC associated with the consumer project + granted by the Grant API. DnsBindPermission is a global + resource and location can only be global. + + Returns: + Callable[[~.GetDnsBindPermissionRequest], + Awaitable[~.DnsBindPermission]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_bind_permission" not in self._stubs: + self._stubs["get_dns_bind_permission"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/GetDnsBindPermission", + request_serializer=vmwareengine.GetDnsBindPermissionRequest.serialize, + response_deserializer=vmwareengine_resources.DnsBindPermission.deserialize, + ) + return self._stubs["get_dns_bind_permission"] + + @property + def revoke_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.RevokeDnsBindPermissionRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the revoke dns bind permission method over gRPC. 
+ + Revokes the bind permission from the customer + provided principal(user / service account) on the + intranet VPC associated with the consumer project. + DnsBindPermission is a global resource and location can + only be global. + + Returns: + Callable[[~.RevokeDnsBindPermissionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "revoke_dns_bind_permission" not in self._stubs: + self._stubs["revoke_dns_bind_permission"] = self.grpc_channel.unary_unary( + "/google.cloud.vmwareengine.v1.VmwareEngine/RevokeDnsBindPermission", + request_serializer=vmwareengine.RevokeDnsBindPermissionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["revoke_dns_bind_permission"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py index 61568d6d90bd..ca802a3910c9 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/services/vmware_engine/transports/rest.py @@ -82,6 +82,22 @@ def post_create_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_create_external_access_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_external_access_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_external_address(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_create_external_address(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_hcx_activation_key(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +106,30 @@ def post_create_hcx_activation_key(self, response): logging.log(f"Received response: {response}") return response + def pre_create_logging_server(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_logging_server(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_management_dns_zone_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_management_dns_zone_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_network_peering(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_network_peering(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_network_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -130,6 +170,46 @@ def post_delete_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_external_access_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_external_access_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_external_address(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_external_address(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_delete_logging_server(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_logging_server(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_management_dns_zone_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_management_dns_zone_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_network_peering(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_network_peering(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_network_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -162,6 +242,14 @@ def post_delete_vmware_engine_network(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_network_policy_external_addresses(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_network_policy_external_addresses(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_cluster(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -170,6 +258,38 @@ def post_get_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_get_dns_bind_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dns_bind_permission(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_dns_forwarding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dns_forwarding(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_get_external_access_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_external_access_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_external_address(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_external_address(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_hcx_activation_key(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -178,6 +298,30 @@ def post_get_hcx_activation_key(self, response): logging.log(f"Received response: {response}") return response + def pre_get_logging_server(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_logging_server(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_management_dns_zone_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_management_dns_zone_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_network_peering(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_network_peering(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_network_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -186,6 +330,14 @@ def post_get_network_policy(self, response): logging.log(f"Received response: {response}") return response + def pre_get_node(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_get_node(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_node_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -226,6 +378,14 @@ def post_get_vmware_engine_network(self, response): logging.log(f"Received response: {response}") return response + def pre_grant_dns_bind_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_grant_dns_bind_permission(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_clusters(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -234,6 +394,22 @@ def post_list_clusters(self, response): logging.log(f"Received response: {response}") return response + def pre_list_external_access_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_external_access_rules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_external_addresses(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_external_addresses(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_hcx_activation_keys(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -242,6 +418,30 @@ def post_list_hcx_activation_keys(self, response): logging.log(f"Received response: {response}") return response + def pre_list_logging_servers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_logging_servers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_management_dns_zone_bindings(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_list_management_dns_zone_bindings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_network_peerings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_network_peerings(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_network_policies(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -250,6 +450,14 @@ def post_list_network_policies(self, response): logging.log(f"Received response: {response}") return response + def pre_list_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_nodes(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_node_types(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -258,6 +466,14 @@ def post_list_node_types(self, response): logging.log(f"Received response: {response}") return response + def pre_list_peering_routes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_peering_routes(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_private_clouds(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -298,6 +514,14 @@ def post_list_vmware_engine_networks(self, response): logging.log(f"Received response: {response}") return response + def pre_repair_management_dns_zone_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_repair_management_dns_zone_binding(self, response): + logging.log(f"Received response: {response}") + return response + def pre_reset_nsx_credentials(self, request, metadata): logging.log(f"Received 
request: {request}") return request, metadata @@ -314,6 +538,14 @@ def post_reset_vcenter_credentials(self, response): logging.log(f"Received response: {response}") return response + def pre_revoke_dns_bind_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_revoke_dns_bind_permission(self, response): + logging.log(f"Received response: {response}") + return response + def pre_show_nsx_credentials(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -346,6 +578,54 @@ def post_update_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_update_dns_forwarding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_dns_forwarding(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_external_access_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_external_access_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_external_address(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_external_address(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_logging_server(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_logging_server(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_management_dns_zone_binding(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_management_dns_zone_binding(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_update_network_peering(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_network_peering(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_network_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -415,22 +695,22 @@ def post_create_cluster( """ return response - def pre_create_hcx_activation_key( + def pre_create_external_access_rule( self, - request: vmwareengine.CreateHcxActivationKeyRequest, + request: vmwareengine.CreateExternalAccessRuleRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.CreateHcxActivationKeyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_hcx_activation_key + ) -> Tuple[vmwareengine.CreateExternalAccessRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_external_access_rule Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_create_hcx_activation_key( + def post_create_external_access_rule( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_hcx_activation_key + """Post-rpc interceptor for create_external_access_rule Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -438,22 +718,22 @@ def post_create_hcx_activation_key( """ return response - def pre_create_network_policy( + def pre_create_external_address( self, - request: vmwareengine.CreateNetworkPolicyRequest, + request: vmwareengine.CreateExternalAddressRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.CreateNetworkPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_network_policy + ) -> Tuple[vmwareengine.CreateExternalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_external_address Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_create_network_policy( + def post_create_external_address( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_network_policy + """Post-rpc interceptor for create_external_address Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -461,22 +741,22 @@ def post_create_network_policy( """ return response - def pre_create_private_cloud( + def pre_create_hcx_activation_key( self, - request: vmwareengine.CreatePrivateCloudRequest, + request: vmwareengine.CreateHcxActivationKeyRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.CreatePrivateCloudRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_private_cloud + ) -> Tuple[vmwareengine.CreateHcxActivationKeyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_hcx_activation_key Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_create_private_cloud( + def post_create_hcx_activation_key( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_private_cloud + """Post-rpc interceptor for create_hcx_activation_key Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -484,22 +764,22 @@ def post_create_private_cloud( """ return response - def pre_create_private_connection( + def pre_create_logging_server( self, - request: vmwareengine.CreatePrivateConnectionRequest, + request: vmwareengine.CreateLoggingServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.CreatePrivateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_private_connection + ) -> Tuple[vmwareengine.CreateLoggingServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_logging_server Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_create_private_connection( + def post_create_logging_server( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_private_connection + """Post-rpc interceptor for create_logging_server Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -507,24 +787,24 @@ def post_create_private_connection( """ return response - def pre_create_vmware_engine_network( + def pre_create_management_dns_zone_binding( self, - request: vmwareengine.CreateVmwareEngineNetworkRequest, + request: vmwareengine.CreateManagementDnsZoneBindingRequest, metadata: Sequence[Tuple[str, str]], ) -> Tuple[ - vmwareengine.CreateVmwareEngineNetworkRequest, Sequence[Tuple[str, str]] + vmwareengine.CreateManagementDnsZoneBindingRequest, Sequence[Tuple[str, str]] ]: - """Pre-rpc interceptor for create_vmware_engine_network + """Pre-rpc interceptor for create_management_dns_zone_binding Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_create_vmware_engine_network( + def post_create_management_dns_zone_binding( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_vmware_engine_network + """Post-rpc interceptor for create_management_dns_zone_binding Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -532,22 +812,22 @@ def post_create_vmware_engine_network( """ return response - def pre_delete_cluster( + def pre_create_network_peering( self, - request: vmwareengine.DeleteClusterRequest, + request: vmwareengine.CreateNetworkPeeringRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.DeleteClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_cluster + ) -> Tuple[vmwareengine.CreateNetworkPeeringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_network_peering Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_delete_cluster( + def post_create_network_peering( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_cluster + """Post-rpc interceptor for create_network_peering Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -555,22 +835,22 @@ def post_delete_cluster( """ return response - def pre_delete_network_policy( + def pre_create_network_policy( self, - request: vmwareengine.DeleteNetworkPolicyRequest, + request: vmwareengine.CreateNetworkPolicyRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.DeleteNetworkPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_network_policy + ) -> Tuple[vmwareengine.CreateNetworkPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_network_policy Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_delete_network_policy( + def post_create_network_policy( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_network_policy + """Post-rpc interceptor for create_network_policy Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -578,22 +858,22 @@ def post_delete_network_policy( """ return response - def pre_delete_private_cloud( + def pre_create_private_cloud( self, - request: vmwareengine.DeletePrivateCloudRequest, + request: vmwareengine.CreatePrivateCloudRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.DeletePrivateCloudRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_private_cloud + ) -> Tuple[vmwareengine.CreatePrivateCloudRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_private_cloud Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_delete_private_cloud( + def post_create_private_cloud( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_private_cloud + """Post-rpc interceptor for create_private_cloud Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -601,22 +881,22 @@ def post_delete_private_cloud( """ return response - def pre_delete_private_connection( + def pre_create_private_connection( self, - request: vmwareengine.DeletePrivateConnectionRequest, + request: vmwareengine.CreatePrivateConnectionRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.DeletePrivateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_private_connection + ) -> Tuple[vmwareengine.CreatePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_private_connection Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_delete_private_connection( + def post_create_private_connection( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_private_connection + """Post-rpc interceptor for create_private_connection Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -624,24 +904,24 @@ def post_delete_private_connection( """ return response - def pre_delete_vmware_engine_network( + def pre_create_vmware_engine_network( self, - request: vmwareengine.DeleteVmwareEngineNetworkRequest, + request: vmwareengine.CreateVmwareEngineNetworkRequest, metadata: Sequence[Tuple[str, str]], ) -> Tuple[ - vmwareengine.DeleteVmwareEngineNetworkRequest, Sequence[Tuple[str, str]] + vmwareengine.CreateVmwareEngineNetworkRequest, Sequence[Tuple[str, str]] ]: - """Pre-rpc interceptor for delete_vmware_engine_network + """Pre-rpc interceptor for create_vmware_engine_network Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_delete_vmware_engine_network( + def post_create_vmware_engine_network( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_vmware_engine_network + """Post-rpc interceptor for create_vmware_engine_network Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -649,22 +929,22 @@ def post_delete_vmware_engine_network( """ return response - def pre_get_cluster( + def pre_delete_cluster( self, - request: vmwareengine.GetClusterRequest, + request: vmwareengine.DeleteClusterRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_cluster + ) -> Tuple[vmwareengine.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_cluster( - self, response: vmwareengine_resources.Cluster - ) -> vmwareengine_resources.Cluster: - """Post-rpc interceptor for get_cluster + def post_delete_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cluster Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -672,22 +952,22 @@ def post_get_cluster( """ return response - def pre_get_hcx_activation_key( + def pre_delete_external_access_rule( self, - request: vmwareengine.GetHcxActivationKeyRequest, + request: vmwareengine.DeleteExternalAccessRuleRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetHcxActivationKeyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_hcx_activation_key + ) -> Tuple[vmwareengine.DeleteExternalAccessRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_external_access_rule Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_hcx_activation_key( - self, response: vmwareengine_resources.HcxActivationKey - ) -> vmwareengine_resources.HcxActivationKey: - """Post-rpc interceptor for get_hcx_activation_key + def post_delete_external_access_rule( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_external_access_rule Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -695,22 +975,22 @@ def post_get_hcx_activation_key( """ return response - def pre_get_network_policy( + def pre_delete_external_address( self, - request: vmwareengine.GetNetworkPolicyRequest, + request: vmwareengine.DeleteExternalAddressRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetNetworkPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_network_policy + ) -> Tuple[vmwareengine.DeleteExternalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_external_address Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_network_policy( - self, response: vmwareengine_resources.NetworkPolicy - ) -> vmwareengine_resources.NetworkPolicy: - """Post-rpc interceptor for get_network_policy + def post_delete_external_address( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_external_address Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -718,22 +998,22 @@ def post_get_network_policy( """ return response - def pre_get_node_type( + def pre_delete_logging_server( self, - request: vmwareengine.GetNodeTypeRequest, + request: vmwareengine.DeleteLoggingServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetNodeTypeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_node_type + ) -> Tuple[vmwareengine.DeleteLoggingServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_logging_server Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_node_type( - self, response: vmwareengine_resources.NodeType - ) -> vmwareengine_resources.NodeType: - """Post-rpc interceptor for get_node_type + def post_delete_logging_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_logging_server Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -741,22 +1021,24 @@ def post_get_node_type( """ return response - def pre_get_private_cloud( + def pre_delete_management_dns_zone_binding( self, - request: vmwareengine.GetPrivateCloudRequest, + request: vmwareengine.DeleteManagementDnsZoneBindingRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetPrivateCloudRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_private_cloud + ) -> Tuple[ + vmwareengine.DeleteManagementDnsZoneBindingRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_management_dns_zone_binding Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_private_cloud( - self, response: vmwareengine_resources.PrivateCloud - ) -> vmwareengine_resources.PrivateCloud: - """Post-rpc interceptor for get_private_cloud + def post_delete_management_dns_zone_binding( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_management_dns_zone_binding Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -764,22 +1046,22 @@ def post_get_private_cloud( """ return response - def pre_get_private_connection( + def pre_delete_network_peering( self, - request: vmwareengine.GetPrivateConnectionRequest, + request: vmwareengine.DeleteNetworkPeeringRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetPrivateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_private_connection + ) -> Tuple[vmwareengine.DeleteNetworkPeeringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_network_peering Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_private_connection( - self, response: vmwareengine_resources.PrivateConnection - ) -> vmwareengine_resources.PrivateConnection: - """Post-rpc interceptor for get_private_connection + def post_delete_network_peering( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_network_peering Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -787,22 +1069,22 @@ def post_get_private_connection( """ return response - def pre_get_subnet( + def pre_delete_network_policy( self, - request: vmwareengine.GetSubnetRequest, + request: vmwareengine.DeleteNetworkPolicyRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetSubnetRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_subnet + ) -> Tuple[vmwareengine.DeleteNetworkPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_network_policy Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_subnet( - self, response: vmwareengine_resources.Subnet - ) -> vmwareengine_resources.Subnet: - """Post-rpc interceptor for get_subnet + def post_delete_network_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_network_policy Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -810,22 +1092,22 @@ def post_get_subnet( """ return response - def pre_get_vmware_engine_network( + def pre_delete_private_cloud( self, - request: vmwareengine.GetVmwareEngineNetworkRequest, + request: vmwareengine.DeletePrivateCloudRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.GetVmwareEngineNetworkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_vmware_engine_network + ) -> Tuple[vmwareengine.DeletePrivateCloudRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_private_cloud Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_vmware_engine_network( - self, response: vmwareengine_resources.VmwareEngineNetwork - ) -> vmwareengine_resources.VmwareEngineNetwork: - """Post-rpc interceptor for get_vmware_engine_network + def post_delete_private_cloud( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_private_cloud Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -833,22 +1115,22 @@ def post_get_vmware_engine_network( """ return response - def pre_list_clusters( + def pre_delete_private_connection( self, - request: vmwareengine.ListClustersRequest, + request: vmwareengine.DeletePrivateConnectionRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListClustersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_clusters + ) -> Tuple[vmwareengine.DeletePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_private_connection Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_clusters( - self, response: vmwareengine.ListClustersResponse - ) -> vmwareengine.ListClustersResponse: - """Post-rpc interceptor for list_clusters + def post_delete_private_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_private_connection Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -856,22 +1138,24 @@ def post_list_clusters( """ return response - def pre_list_hcx_activation_keys( + def pre_delete_vmware_engine_network( self, - request: vmwareengine.ListHcxActivationKeysRequest, + request: vmwareengine.DeleteVmwareEngineNetworkRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListHcxActivationKeysRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_hcx_activation_keys + ) -> Tuple[ + vmwareengine.DeleteVmwareEngineNetworkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_vmware_engine_network Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_hcx_activation_keys( - self, response: vmwareengine.ListHcxActivationKeysResponse - ) -> vmwareengine.ListHcxActivationKeysResponse: - """Post-rpc interceptor for list_hcx_activation_keys + def post_delete_vmware_engine_network( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_vmware_engine_network Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -879,22 +1163,25 @@ def post_list_hcx_activation_keys( """ return response - def pre_list_network_policies( + def pre_fetch_network_policy_external_addresses( self, - request: vmwareengine.ListNetworkPoliciesRequest, + request: vmwareengine.FetchNetworkPolicyExternalAddressesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListNetworkPoliciesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_network_policies + ) -> Tuple[ + vmwareengine.FetchNetworkPolicyExternalAddressesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for fetch_network_policy_external_addresses Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_network_policies( - self, response: vmwareengine.ListNetworkPoliciesResponse - ) -> vmwareengine.ListNetworkPoliciesResponse: - """Post-rpc interceptor for list_network_policies + def post_fetch_network_policy_external_addresses( + self, response: vmwareengine.FetchNetworkPolicyExternalAddressesResponse + ) -> vmwareengine.FetchNetworkPolicyExternalAddressesResponse: + """Post-rpc interceptor for fetch_network_policy_external_addresses Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -902,22 +1189,22 @@ def post_list_network_policies( """ return response - def pre_list_node_types( + def pre_get_cluster( self, - request: vmwareengine.ListNodeTypesRequest, + request: vmwareengine.GetClusterRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListNodeTypesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_node_types + ) -> Tuple[vmwareengine.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_node_types( - self, response: vmwareengine.ListNodeTypesResponse - ) -> vmwareengine.ListNodeTypesResponse: - """Post-rpc interceptor for list_node_types + def post_get_cluster( + self, response: vmwareengine_resources.Cluster + ) -> vmwareengine_resources.Cluster: + """Post-rpc interceptor for get_cluster Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -925,22 +1212,22 @@ def post_list_node_types( """ return response - def pre_list_private_clouds( + def pre_get_dns_bind_permission( self, - request: vmwareengine.ListPrivateCloudsRequest, + request: vmwareengine.GetDnsBindPermissionRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListPrivateCloudsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_private_clouds + ) -> Tuple[vmwareengine.GetDnsBindPermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dns_bind_permission Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_private_clouds( - self, response: vmwareengine.ListPrivateCloudsResponse - ) -> vmwareengine.ListPrivateCloudsResponse: - """Post-rpc interceptor for list_private_clouds + def post_get_dns_bind_permission( + self, response: vmwareengine_resources.DnsBindPermission + ) -> vmwareengine_resources.DnsBindPermission: + """Post-rpc interceptor for get_dns_bind_permission Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -948,25 +1235,22 @@ def post_list_private_clouds( """ return response - def pre_list_private_connection_peering_routes( + def pre_get_dns_forwarding( self, - request: vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + request: vmwareengine.GetDnsForwardingRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ - vmwareengine.ListPrivateConnectionPeeringRoutesRequest, - Sequence[Tuple[str, str]], - ]: - """Pre-rpc interceptor for list_private_connection_peering_routes + ) -> Tuple[vmwareengine.GetDnsForwardingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dns_forwarding Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_private_connection_peering_routes( - self, response: vmwareengine.ListPrivateConnectionPeeringRoutesResponse - ) -> vmwareengine.ListPrivateConnectionPeeringRoutesResponse: - """Post-rpc interceptor for list_private_connection_peering_routes + def post_get_dns_forwarding( + self, response: vmwareengine_resources.DnsForwarding + ) -> vmwareengine_resources.DnsForwarding: + """Post-rpc interceptor for get_dns_forwarding Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -974,22 +1258,22 @@ def post_list_private_connection_peering_routes( """ return response - def pre_list_private_connections( + def pre_get_external_access_rule( self, - request: vmwareengine.ListPrivateConnectionsRequest, + request: vmwareengine.GetExternalAccessRuleRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListPrivateConnectionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_private_connections + ) -> Tuple[vmwareengine.GetExternalAccessRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_external_access_rule Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_private_connections( - self, response: vmwareengine.ListPrivateConnectionsResponse - ) -> vmwareengine.ListPrivateConnectionsResponse: - """Post-rpc interceptor for list_private_connections + def post_get_external_access_rule( + self, response: vmwareengine_resources.ExternalAccessRule + ) -> vmwareengine_resources.ExternalAccessRule: + """Post-rpc interceptor for get_external_access_rule Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -997,22 +1281,22 @@ def post_list_private_connections( """ return response - def pre_list_subnets( + def pre_get_external_address( self, - request: vmwareengine.ListSubnetsRequest, + request: vmwareengine.GetExternalAddressRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListSubnetsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_subnets + ) -> Tuple[vmwareengine.GetExternalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_external_address Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_subnets( - self, response: vmwareengine.ListSubnetsResponse - ) -> vmwareengine.ListSubnetsResponse: - """Post-rpc interceptor for list_subnets + def post_get_external_address( + self, response: vmwareengine_resources.ExternalAddress + ) -> vmwareengine_resources.ExternalAddress: + """Post-rpc interceptor for get_external_address Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1020,22 +1304,22 @@ def post_list_subnets( """ return response - def pre_list_vmware_engine_networks( + def pre_get_hcx_activation_key( self, - request: vmwareengine.ListVmwareEngineNetworksRequest, + request: vmwareengine.GetHcxActivationKeyRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ListVmwareEngineNetworksRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_vmware_engine_networks + ) -> Tuple[vmwareengine.GetHcxActivationKeyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_hcx_activation_key Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_vmware_engine_networks( - self, response: vmwareengine.ListVmwareEngineNetworksResponse - ) -> vmwareengine.ListVmwareEngineNetworksResponse: - """Post-rpc interceptor for list_vmware_engine_networks + def post_get_hcx_activation_key( + self, response: vmwareengine_resources.HcxActivationKey + ) -> vmwareengine_resources.HcxActivationKey: + """Post-rpc interceptor for get_hcx_activation_key Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1043,22 +1327,22 @@ def post_list_vmware_engine_networks( """ return response - def pre_reset_nsx_credentials( + def pre_get_logging_server( self, - request: vmwareengine.ResetNsxCredentialsRequest, + request: vmwareengine.GetLoggingServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ResetNsxCredentialsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reset_nsx_credentials + ) -> Tuple[vmwareengine.GetLoggingServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_logging_server Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_reset_nsx_credentials( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for reset_nsx_credentials + def post_get_logging_server( + self, response: vmwareengine_resources.LoggingServer + ) -> vmwareengine_resources.LoggingServer: + """Post-rpc interceptor for get_logging_server Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1066,22 +1350,24 @@ def post_reset_nsx_credentials( """ return response - def pre_reset_vcenter_credentials( + def pre_get_management_dns_zone_binding( self, - request: vmwareengine.ResetVcenterCredentialsRequest, + request: vmwareengine.GetManagementDnsZoneBindingRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ResetVcenterCredentialsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reset_vcenter_credentials + ) -> Tuple[ + vmwareengine.GetManagementDnsZoneBindingRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_management_dns_zone_binding Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_reset_vcenter_credentials( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for reset_vcenter_credentials + def post_get_management_dns_zone_binding( + self, response: vmwareengine_resources.ManagementDnsZoneBinding + ) -> vmwareengine_resources.ManagementDnsZoneBinding: + """Post-rpc interceptor for get_management_dns_zone_binding Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1089,22 +1375,22 @@ def post_reset_vcenter_credentials( """ return response - def pre_show_nsx_credentials( + def pre_get_network_peering( self, - request: vmwareengine.ShowNsxCredentialsRequest, + request: vmwareengine.GetNetworkPeeringRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ShowNsxCredentialsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for show_nsx_credentials + ) -> Tuple[vmwareengine.GetNetworkPeeringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_network_peering Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_show_nsx_credentials( - self, response: vmwareengine_resources.Credentials - ) -> vmwareengine_resources.Credentials: - """Post-rpc interceptor for show_nsx_credentials + def post_get_network_peering( + self, response: vmwareengine_resources.NetworkPeering + ) -> vmwareengine_resources.NetworkPeering: + """Post-rpc interceptor for get_network_peering Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1112,22 +1398,22 @@ def post_show_nsx_credentials( """ return response - def pre_show_vcenter_credentials( + def pre_get_network_policy( self, - request: vmwareengine.ShowVcenterCredentialsRequest, + request: vmwareengine.GetNetworkPolicyRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.ShowVcenterCredentialsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for show_vcenter_credentials + ) -> Tuple[vmwareengine.GetNetworkPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_network_policy Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_show_vcenter_credentials( - self, response: vmwareengine_resources.Credentials - ) -> vmwareengine_resources.Credentials: - """Post-rpc interceptor for show_vcenter_credentials + def post_get_network_policy( + self, response: vmwareengine_resources.NetworkPolicy + ) -> vmwareengine_resources.NetworkPolicy: + """Post-rpc interceptor for get_network_policy Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1135,22 +1421,20 @@ def post_show_vcenter_credentials( """ return response - def pre_undelete_private_cloud( - self, - request: vmwareengine.UndeletePrivateCloudRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.UndeletePrivateCloudRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for undelete_private_cloud + def pre_get_node( + self, request: vmwareengine.GetNodeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[vmwareengine.GetNodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_node Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_undelete_private_cloud( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for undelete_private_cloud + def post_get_node( + self, response: vmwareengine_resources.Node + ) -> vmwareengine_resources.Node: + """Post-rpc interceptor for get_node Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1158,22 +1442,22 @@ def post_undelete_private_cloud( """ return response - def pre_update_cluster( + def pre_get_node_type( self, - request: vmwareengine.UpdateClusterRequest, + request: vmwareengine.GetNodeTypeRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.UpdateClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_cluster + ) -> Tuple[vmwareengine.GetNodeTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_node_type Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_update_cluster( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_cluster + def post_get_node_type( + self, response: vmwareengine_resources.NodeType + ) -> vmwareengine_resources.NodeType: + """Post-rpc interceptor for get_node_type Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1181,22 +1465,22 @@ def post_update_cluster( """ return response - def pre_update_network_policy( + def pre_get_private_cloud( self, - request: vmwareengine.UpdateNetworkPolicyRequest, + request: vmwareengine.GetPrivateCloudRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.UpdateNetworkPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_network_policy + ) -> Tuple[vmwareengine.GetPrivateCloudRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_private_cloud Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_update_network_policy( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_network_policy + def post_get_private_cloud( + self, response: vmwareengine_resources.PrivateCloud + ) -> vmwareengine_resources.PrivateCloud: + """Post-rpc interceptor for get_private_cloud Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1204,22 +1488,22 @@ def post_update_network_policy( """ return response - def pre_update_private_cloud( + def pre_get_private_connection( self, - request: vmwareengine.UpdatePrivateCloudRequest, + request: vmwareengine.GetPrivateConnectionRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.UpdatePrivateCloudRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_private_cloud + ) -> Tuple[vmwareengine.GetPrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_private_connection Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_update_private_cloud( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_private_cloud + def post_get_private_connection( + self, response: vmwareengine_resources.PrivateConnection + ) -> vmwareengine_resources.PrivateConnection: + """Post-rpc interceptor for get_private_connection Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1227,22 +1511,22 @@ def post_update_private_cloud( """ return response - def pre_update_private_connection( + def pre_get_subnet( self, - request: vmwareengine.UpdatePrivateConnectionRequest, + request: vmwareengine.GetSubnetRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.UpdatePrivateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_private_connection + ) -> Tuple[vmwareengine.GetSubnetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_subnet Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_update_private_connection( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_private_connection + def post_get_subnet( + self, response: vmwareengine_resources.Subnet + ) -> vmwareengine_resources.Subnet: + """Post-rpc interceptor for get_subnet Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1250,22 +1534,22 @@ def post_update_private_connection( """ return response - def pre_update_subnet( + def pre_get_vmware_engine_network( self, - request: vmwareengine.UpdateSubnetRequest, + request: vmwareengine.GetVmwareEngineNetworkRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[vmwareengine.UpdateSubnetRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_subnet + ) -> Tuple[vmwareengine.GetVmwareEngineNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_vmware_engine_network Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_update_subnet( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_subnet + def post_get_vmware_engine_network( + self, response: vmwareengine_resources.VmwareEngineNetwork + ) -> vmwareengine_resources.VmwareEngineNetwork: + """Post-rpc interceptor for get_vmware_engine_network Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1273,24 +1557,22 @@ def post_update_subnet( """ return response - def pre_update_vmware_engine_network( + def pre_grant_dns_bind_permission( self, - request: vmwareengine.UpdateVmwareEngineNetworkRequest, + request: vmwareengine.GrantDnsBindPermissionRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ - vmwareengine.UpdateVmwareEngineNetworkRequest, Sequence[Tuple[str, str]] - ]: - """Pre-rpc interceptor for update_vmware_engine_network + ) -> Tuple[vmwareengine.GrantDnsBindPermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for grant_dns_bind_permission Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_update_vmware_engine_network( + def post_grant_dns_bind_permission( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_vmware_engine_network + """Post-rpc interceptor for grant_dns_bind_permission Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1298,22 +1580,22 @@ def post_update_vmware_engine_network( """ return response - def pre_get_location( + def pre_list_clusters( self, - request: locations_pb2.GetLocationRequest, + request: vmwareengine.ListClustersRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location + ) -> Tuple[vmwareengine.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_list_clusters( + self, response: vmwareengine.ListClustersResponse + ) -> vmwareengine.ListClustersResponse: + """Post-rpc interceptor for list_clusters Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1321,22 +1603,22 @@ def post_get_location( """ return response - def pre_list_locations( + def pre_list_external_access_rules( self, - request: locations_pb2.ListLocationsRequest, + request: vmwareengine.ListExternalAccessRulesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations + ) -> Tuple[vmwareengine.ListExternalAccessRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_external_access_rules Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + def post_list_external_access_rules( + self, response: vmwareengine.ListExternalAccessRulesResponse + ) -> vmwareengine.ListExternalAccessRulesResponse: + """Post-rpc interceptor for list_external_access_rules Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1344,20 +1626,22 @@ def post_list_locations( """ return response - def pre_get_iam_policy( + def pre_list_external_addresses( self, - request: iam_policy_pb2.GetIamPolicyRequest, + request: vmwareengine.ListExternalAddressesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy + ) -> Tuple[vmwareengine.ListExternalAddressesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_external_addresses Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy + def post_list_external_addresses( + self, response: vmwareengine.ListExternalAddressesResponse + ) -> vmwareengine.ListExternalAddressesResponse: + """Post-rpc interceptor for list_external_addresses Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1365,20 +1649,22 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_set_iam_policy( + def pre_list_hcx_activation_keys( self, - request: iam_policy_pb2.SetIamPolicyRequest, + request: vmwareengine.ListHcxActivationKeysRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy + ) -> Tuple[vmwareengine.ListHcxActivationKeysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_hcx_activation_keys Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy + def post_list_hcx_activation_keys( + self, response: vmwareengine.ListHcxActivationKeysResponse + ) -> vmwareengine.ListHcxActivationKeysResponse: + """Post-rpc interceptor for list_hcx_activation_keys Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1386,22 +1672,22 @@ def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_test_iam_permissions( + def pre_list_logging_servers( self, - request: iam_policy_pb2.TestIamPermissionsRequest, + request: vmwareengine.ListLoggingServersRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions + ) -> Tuple[vmwareengine.ListLoggingServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_logging_servers Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions + def post_list_logging_servers( + self, response: vmwareengine.ListLoggingServersResponse + ) -> vmwareengine.ListLoggingServersResponse: + """Post-rpc interceptor for list_logging_servers Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1409,20 +1695,24 @@ def post_test_iam_permissions( """ return response - def pre_delete_operation( + def pre_list_management_dns_zone_bindings( self, - request: operations_pb2.DeleteOperationRequest, + request: vmwareengine.ListManagementDnsZoneBindingsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation + ) -> Tuple[ + vmwareengine.ListManagementDnsZoneBindingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_management_dns_zone_bindings Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation + def post_list_management_dns_zone_bindings( + self, response: vmwareengine.ListManagementDnsZoneBindingsResponse + ) -> vmwareengine.ListManagementDnsZoneBindingsResponse: + """Post-rpc interceptor for list_management_dns_zone_bindings Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1430,22 +1720,22 @@ def post_delete_operation(self, response: None) -> None: """ return response - def pre_get_operation( + def pre_list_network_peerings( self, - request: operations_pb2.GetOperationRequest, + request: vmwareengine.ListNetworkPeeringsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation + ) -> Tuple[vmwareengine.ListNetworkPeeringsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_network_peerings Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + def post_list_network_peerings( + self, response: vmwareengine.ListNetworkPeeringsResponse + ) -> vmwareengine.ListNetworkPeeringsResponse: + """Post-rpc interceptor for list_network_peerings Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1453,22 +1743,22 @@ def post_get_operation( """ return response - def pre_list_operations( + def pre_list_network_policies( self, - request: operations_pb2.ListOperationsRequest, + request: vmwareengine.ListNetworkPoliciesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations + ) -> Tuple[vmwareengine.ListNetworkPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_network_policies Override in a subclass to manipulate the request or metadata before they are sent to the VmwareEngine server. 
""" return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_list_network_policies( + self, response: vmwareengine.ListNetworkPoliciesResponse + ) -> vmwareengine.ListNetworkPoliciesResponse: + """Post-rpc interceptor for list_network_policies Override in a subclass to manipulate the response after it is returned by the VmwareEngine server but before @@ -1476,157 +1766,4278 @@ def post_list_operations( """ return response + def pre_list_nodes( + self, + request: vmwareengine.ListNodesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListNodesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_nodes -@dataclasses.dataclass -class VmwareEngineRestStub: - _session: AuthorizedSession - _host: str - _interceptor: VmwareEngineRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + def post_list_nodes( + self, response: vmwareengine.ListNodesResponse + ) -> vmwareengine.ListNodesResponse: + """Post-rpc interceptor for list_nodes -class VmwareEngineRestTransport(VmwareEngineTransport): - """REST backend transport for VmwareEngine. + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response - VMwareEngine manages VMware's private clusters in the Cloud. + def pre_list_node_types( + self, + request: vmwareengine.ListNodeTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListNodeTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_node_types - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
+ Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_node_types( + self, response: vmwareengine.ListNodeTypesResponse + ) -> vmwareengine.ListNodeTypesResponse: + """Post-rpc interceptor for list_node_types + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_list_peering_routes( + self, + request: vmwareengine.ListPeeringRoutesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListPeeringRoutesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_peering_routes + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_peering_routes( + self, response: vmwareengine.ListPeeringRoutesResponse + ) -> vmwareengine.ListPeeringRoutesResponse: + """Post-rpc interceptor for list_peering_routes + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_list_private_clouds( + self, + request: vmwareengine.ListPrivateCloudsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListPrivateCloudsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_private_clouds + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_private_clouds( + self, response: vmwareengine.ListPrivateCloudsResponse + ) -> vmwareengine.ListPrivateCloudsResponse: + """Post-rpc interceptor for list_private_clouds + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_list_private_connection_peering_routes( + self, + request: vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_private_connection_peering_routes + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_private_connection_peering_routes( + self, response: vmwareengine.ListPrivateConnectionPeeringRoutesResponse + ) -> vmwareengine.ListPrivateConnectionPeeringRoutesResponse: + """Post-rpc interceptor for list_private_connection_peering_routes + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_list_private_connections( + self, + request: vmwareengine.ListPrivateConnectionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListPrivateConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_private_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_private_connections( + self, response: vmwareengine.ListPrivateConnectionsResponse + ) -> vmwareengine.ListPrivateConnectionsResponse: + """Post-rpc interceptor for list_private_connections + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_list_subnets( + self, + request: vmwareengine.ListSubnetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListSubnetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_subnets + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_subnets( + self, response: vmwareengine.ListSubnetsResponse + ) -> vmwareengine.ListSubnetsResponse: + """Post-rpc interceptor for list_subnets + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_list_vmware_engine_networks( + self, + request: vmwareengine.ListVmwareEngineNetworksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ListVmwareEngineNetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_vmware_engine_networks + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_vmware_engine_networks( + self, response: vmwareengine.ListVmwareEngineNetworksResponse + ) -> vmwareengine.ListVmwareEngineNetworksResponse: + """Post-rpc interceptor for list_vmware_engine_networks + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_repair_management_dns_zone_binding( + self, + request: vmwareengine.RepairManagementDnsZoneBindingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + vmwareengine.RepairManagementDnsZoneBindingRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for repair_management_dns_zone_binding + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_repair_management_dns_zone_binding( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for repair_management_dns_zone_binding + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_reset_nsx_credentials( + self, + request: vmwareengine.ResetNsxCredentialsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ResetNsxCredentialsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reset_nsx_credentials + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_reset_nsx_credentials( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for reset_nsx_credentials + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_reset_vcenter_credentials( + self, + request: vmwareengine.ResetVcenterCredentialsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ResetVcenterCredentialsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reset_vcenter_credentials + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_reset_vcenter_credentials( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for reset_vcenter_credentials + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_revoke_dns_bind_permission( + self, + request: vmwareengine.RevokeDnsBindPermissionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.RevokeDnsBindPermissionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for revoke_dns_bind_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_revoke_dns_bind_permission( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for revoke_dns_bind_permission + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_show_nsx_credentials( + self, + request: vmwareengine.ShowNsxCredentialsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ShowNsxCredentialsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for show_nsx_credentials + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. 
+ """ + return request, metadata + + def post_show_nsx_credentials( + self, response: vmwareengine_resources.Credentials + ) -> vmwareengine_resources.Credentials: + """Post-rpc interceptor for show_nsx_credentials + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_show_vcenter_credentials( + self, + request: vmwareengine.ShowVcenterCredentialsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.ShowVcenterCredentialsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for show_vcenter_credentials + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_show_vcenter_credentials( + self, response: vmwareengine_resources.Credentials + ) -> vmwareengine_resources.Credentials: + """Post-rpc interceptor for show_vcenter_credentials + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_undelete_private_cloud( + self, + request: vmwareengine.UndeletePrivateCloudRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UndeletePrivateCloudRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for undelete_private_cloud + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_undelete_private_cloud( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for undelete_private_cloud + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_update_cluster( + self, + request: vmwareengine.UpdateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_dns_forwarding( + self, + request: vmwareengine.UpdateDnsForwardingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateDnsForwardingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_dns_forwarding + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_dns_forwarding( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_dns_forwarding + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_external_access_rule( + self, + request: vmwareengine.UpdateExternalAccessRuleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateExternalAccessRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_external_access_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. 
+ """ + return request, metadata + + def post_update_external_access_rule( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_external_access_rule + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_external_address( + self, + request: vmwareengine.UpdateExternalAddressRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateExternalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_external_address + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_external_address( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_external_address + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_logging_server( + self, + request: vmwareengine.UpdateLoggingServerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateLoggingServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_logging_server + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_logging_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_logging_server + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_update_management_dns_zone_binding( + self, + request: vmwareengine.UpdateManagementDnsZoneBindingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + vmwareengine.UpdateManagementDnsZoneBindingRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_management_dns_zone_binding + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_management_dns_zone_binding( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_management_dns_zone_binding + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_network_peering( + self, + request: vmwareengine.UpdateNetworkPeeringRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateNetworkPeeringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_network_peering + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_network_peering( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_network_peering + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_network_policy( + self, + request: vmwareengine.UpdateNetworkPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateNetworkPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_network_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. 
+ """ + return request, metadata + + def post_update_network_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_network_policy + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_private_cloud( + self, + request: vmwareengine.UpdatePrivateCloudRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdatePrivateCloudRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_private_cloud + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_private_cloud( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_private_cloud + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_private_connection( + self, + request: vmwareengine.UpdatePrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdatePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_private_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_private_connection + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_update_subnet( + self, + request: vmwareengine.UpdateSubnetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vmwareengine.UpdateSubnetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_subnet + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_subnet( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_subnet + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_update_vmware_engine_network( + self, + request: vmwareengine.UpdateVmwareEngineNetworkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + vmwareengine.UpdateVmwareEngineNetworkRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_vmware_engine_network + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_update_vmware_engine_network( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_vmware_engine_network + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. 
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the VmwareEngine server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the VmwareEngine server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class VmwareEngineRestStub: + _session: AuthorizedSession + _host: str + _interceptor: VmwareEngineRestInterceptor + + +class VmwareEngineRestTransport(VmwareEngineTransport): + """REST backend transport for VmwareEngine. + + VMwareEngine manages VMware's private clusters in the Cloud. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ + """ + + def __init__( + self, + *, + host: str = "vmwareengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[VmwareEngineRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored +            if ``channel`` is provided. +        quota_project_id (Optional[str]): An optional project to use for billing +            and quota. +        client_info (google.api_core.gapic_v1.client_info.ClientInfo): +            The client info used to send a user-agent string along with +            API requests. If ``None``, then default info will be used. +            Generally, you only need to set this if you are developing +            your own client library. +        always_use_jwt_access (Optional[bool]): Whether self signed JWT should +            be used for service account credentials. +        url_scheme: the protocol scheme for the API endpoint.  Normally +            "https", but for testing or local servers, +            "http" can be specified. +        """ +        # Run the base constructor +        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. +        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the +        # credentials object +        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) +        if maybe_url_match is None: +            raise ValueError( +                f"Unexpected hostname structure: {host}" +            )  # pragma: NO COVER + +        url_match_items = maybe_url_match.groupdict() + +        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + +        super().__init__( +            host=host, +            credentials=credentials, +            client_info=client_info, +            always_use_jwt_access=always_use_jwt_access, +            api_audience=api_audience, +        ) +        self._session = AuthorizedSession( +            self._credentials, default_host=self.DEFAULT_HOST +        ) +        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None +        if client_cert_source_for_mtls: +            self._session.configure_mtls_channel(client_cert_source_for_mtls) +        self._interceptor = interceptor or VmwareEngineRestInterceptor() +        self._prep_wrapped_messages(client_info) + +    @property +    def operations_client(self) -> operations_v1.AbstractOperationsClient: +        """Create the client designed to process long-running operations. + +        This property caches on the instance; repeated calls return the same +        client.
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateCluster(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "clusterId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cluster method over HTTP. + + Args: + request (~.vmwareengine.CreateClusterRequest): + The request object. Request message for + [VmwareEngine.CreateCluster][google.cloud.vmwareengine.v1.VmwareEngine.CreateCluster] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_create_cluster(request, metadata) + pb_request = vmwareengine.CreateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cluster(resp) + return resp + + class _CreateExternalAccessRule(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateExternalAccessRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "externalAccessRuleId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateExternalAccessRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create external access + rule method over HTTP. + + Args: + request (~.vmwareengine.CreateExternalAccessRuleRequest): + The request object. Request message for + [VmwareEngine.CreateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAccessRule] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/networkPolicies/*}/externalAccessRules", + "body": "external_access_rule", + }, + ] + request, metadata = self._interceptor.pre_create_external_access_rule( + request, metadata + ) + pb_request = vmwareengine.CreateExternalAccessRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_external_access_rule(resp) + return resp + + class _CreateExternalAddress(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateExternalAddress") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "externalAddressId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateExternalAddressRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create external address method over HTTP. + + Args: + request (~.vmwareengine.CreateExternalAddressRequest): + The request object. Request message for + [VmwareEngine.CreateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAddress] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/externalAddresses", + "body": "external_address", + }, + ] + request, metadata = self._interceptor.pre_create_external_address( + request, metadata + ) + pb_request = vmwareengine.CreateExternalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_external_address(resp) + return resp + + class _CreateHcxActivationKey(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateHcxActivationKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "hcxActivationKeyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateHcxActivationKeyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create hcx activation key method over HTTP. + + Args: + request (~.vmwareengine.CreateHcxActivationKeyRequest): + The request object. Request message for + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys", + "body": "hcx_activation_key", + }, + ] + request, metadata = self._interceptor.pre_create_hcx_activation_key( + request, metadata + ) + pb_request = vmwareengine.CreateHcxActivationKeyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_hcx_activation_key(resp) + return resp + + class _CreateLoggingServer(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateLoggingServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "loggingServerId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateLoggingServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create logging server method over HTTP. + + Args: + request (~.vmwareengine.CreateLoggingServerRequest): + The request object. Request message for + [VmwareEngine.CreateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.CreateLoggingServer] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/loggingServers", + "body": "logging_server", + }, + ] + request, metadata = self._interceptor.pre_create_logging_server( + request, metadata + ) + pb_request = vmwareengine.CreateLoggingServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_logging_server(resp) + return resp + + class _CreateManagementDnsZoneBinding(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateManagementDnsZoneBinding") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "managementDnsZoneBindingId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateManagementDnsZoneBindingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create management dns + zone binding method over HTTP. + + Args: + request (~.vmwareengine.CreateManagementDnsZoneBindingRequest): + The request object. Request message for + [VmwareEngine.CreateManagementDnsZoneBindings][] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/managementDnsZoneBindings", + "body": "management_dns_zone_binding", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_create_management_dns_zone_binding( + request, metadata + ) + pb_request = vmwareengine.CreateManagementDnsZoneBindingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_management_dns_zone_binding(resp) + return resp + + class _CreateNetworkPeering(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateNetworkPeering") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "networkPeeringId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateNetworkPeeringRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create network peering method over HTTP. + + Args: + request (~.vmwareengine.CreateNetworkPeeringRequest): + The request object. Request message for + [VmwareEngine.CreateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPeering] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/networkPeerings", + "body": "network_peering", + }, + ] + request, metadata = self._interceptor.pre_create_network_peering( + request, metadata + ) + pb_request = vmwareengine.CreateNetworkPeeringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_network_peering(resp) + return resp + + class _CreateNetworkPolicy(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateNetworkPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "networkPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateNetworkPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create network policy method over HTTP. + + Args: + request (~.vmwareengine.CreateNetworkPolicyRequest): + The request object. Request message for + [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/networkPolicies", + "body": "network_policy", + }, + ] + request, metadata = self._interceptor.pre_create_network_policy( + request, metadata + ) + pb_request = vmwareengine.CreateNetworkPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_network_policy(resp) + return resp + + class _CreatePrivateCloud(VmwareEngineRestStub): + def __hash__(self): + return hash("CreatePrivateCloud") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "privateCloudId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreatePrivateCloudRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create private cloud method over HTTP. + + Args: + request (~.vmwareengine.CreatePrivateCloudRequest): + The request object. Request message for + [VmwareEngine.CreatePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/privateClouds", + "body": "private_cloud", + }, + ] + request, metadata = self._interceptor.pre_create_private_cloud( + request, metadata + ) + pb_request = vmwareengine.CreatePrivateCloudRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_private_cloud(resp) + return resp + + class _CreatePrivateConnection(VmwareEngineRestStub): + def __hash__(self): + return hash("CreatePrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "privateConnectionId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreatePrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create private connection method over HTTP. + + Args: + request (~.vmwareengine.CreatePrivateConnectionRequest): + The request object. Request message for + [VmwareEngine.CreatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/privateConnections", + "body": "private_connection", + }, + ] + request, metadata = self._interceptor.pre_create_private_connection( + request, metadata + ) + pb_request = vmwareengine.CreatePrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_private_connection(resp) + return resp + + class _CreateVmwareEngineNetwork(VmwareEngineRestStub): + def __hash__(self): + return hash("CreateVmwareEngineNetwork") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "vmwareEngineNetworkId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.CreateVmwareEngineNetworkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create vmware engine + network method over HTTP. + + Args: + request (~.vmwareengine.CreateVmwareEngineNetworkRequest): + The request object. Request message for + [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks", + "body": "vmware_engine_network", + }, + ] + request, metadata = self._interceptor.pre_create_vmware_engine_network( + request, metadata + ) + pb_request = vmwareengine.CreateVmwareEngineNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_vmware_engine_network(resp) + return resp + + class _DeleteCluster(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.vmwareengine.DeleteClusterRequest): + The request object. Request message for + [VmwareEngine.DeleteCluster][google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_cluster(request, metadata) + pb_request = vmwareengine.DeleteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cluster(resp) + return resp + + class _DeleteExternalAccessRule(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteExternalAccessRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteExternalAccessRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete external access + rule method over HTTP. + + Args: + request (~.vmwareengine.DeleteExternalAccessRuleRequest): + The request object. Request message for + [VmwareEngine.DeleteExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/networkPolicies/*/externalAccessRules/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_external_access_rule( + request, metadata + ) + pb_request = vmwareengine.DeleteExternalAccessRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_external_access_rule(resp) + return resp + + class _DeleteExternalAddress(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteExternalAddress") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteExternalAddressRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete external address method over HTTP. + + Args: + request (~.vmwareengine.DeleteExternalAddressRequest): + The request object. Request message for + [VmwareEngine.DeleteExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAddress] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/externalAddresses/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_external_address( + request, metadata + ) + pb_request = vmwareengine.DeleteExternalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_external_address(resp) + return resp + + class _DeleteLoggingServer(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteLoggingServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteLoggingServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete logging server method over HTTP. + + Args: + request (~.vmwareengine.DeleteLoggingServerRequest): + The request object. Request message for + [VmwareEngine.DeleteLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.DeleteLoggingServer] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/loggingServers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_logging_server( + request, metadata + ) + pb_request = vmwareengine.DeleteLoggingServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_logging_server(resp) + return resp + + class _DeleteManagementDnsZoneBinding(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteManagementDnsZoneBinding") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteManagementDnsZoneBindingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete management dns + zone binding method over HTTP. + + Args: + request (~.vmwareengine.DeleteManagementDnsZoneBindingRequest): + The request object. Request message for + [VmwareEngine.DeleteManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_delete_management_dns_zone_binding( + request, metadata + ) + pb_request = vmwareengine.DeleteManagementDnsZoneBindingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_management_dns_zone_binding(resp) + return resp + + class _DeleteNetworkPeering(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteNetworkPeering") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteNetworkPeeringRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete network peering method over HTTP. + + Args: + request (~.vmwareengine.DeleteNetworkPeeringRequest): + The request object. Request message for + [VmwareEngine.DeleteNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPeering] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/networkPeerings/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_network_peering( + request, metadata + ) + pb_request = vmwareengine.DeleteNetworkPeeringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_network_peering(resp) + return resp + + class _DeleteNetworkPolicy(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteNetworkPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteNetworkPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete network policy method over HTTP. + + Args: + request (~.vmwareengine.DeleteNetworkPolicyRequest): + The request object. Request message for + [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/networkPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_network_policy( + request, metadata + ) + pb_request = vmwareengine.DeleteNetworkPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_network_policy(resp) + return resp + + class _DeletePrivateCloud(VmwareEngineRestStub): + def __hash__(self): + return hash("DeletePrivateCloud") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeletePrivateCloudRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete private cloud method over HTTP. + + Args: + request (~.vmwareengine.DeletePrivateCloudRequest): + The request object. Request message for + [VmwareEngine.DeletePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_private_cloud( + request, metadata + ) + pb_request = vmwareengine.DeletePrivateCloudRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_private_cloud(resp) + return resp + + class _DeletePrivateConnection(VmwareEngineRestStub): + def __hash__(self): + return hash("DeletePrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeletePrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete private connection method over HTTP. + + Args: + request (~.vmwareengine.DeletePrivateConnectionRequest): + The request object. Request message for + [VmwareEngine.DeletePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateConnections/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_private_connection( + request, metadata + ) + pb_request = vmwareengine.DeletePrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_private_connection(resp) + return resp + + class _DeleteVmwareEngineNetwork(VmwareEngineRestStub): + def __hash__(self): + return hash("DeleteVmwareEngineNetwork") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.DeleteVmwareEngineNetworkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete vmware engine + network method over HTTP. + + Args: + request (~.vmwareengine.DeleteVmwareEngineNetworkRequest): + The request object. Request message for + [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_vmware_engine_network( + request, metadata + ) + pb_request = vmwareengine.DeleteVmwareEngineNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_vmware_engine_network(resp) + return resp + + class _FetchNetworkPolicyExternalAddresses(VmwareEngineRestStub): + def __hash__(self): + return hash("FetchNetworkPolicyExternalAddresses") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.FetchNetworkPolicyExternalAddressesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine.FetchNetworkPolicyExternalAddressesResponse: + r"""Call the fetch network policy + external addresses method over HTTP. + + Args: + request (~.vmwareengine.FetchNetworkPolicyExternalAddressesRequest): + The request object. Request message for + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.vmwareengine.FetchNetworkPolicyExternalAddressesResponse: + Response message for + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{network_policy=projects/*/locations/*/networkPolicies/*}:fetchExternalAddresses", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_fetch_network_policy_external_addresses( + request, metadata + ) + pb_request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() + pb_resp = vmwareengine.FetchNetworkPolicyExternalAddressesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_network_policy_external_addresses(resp) + return resp + + class _GetCluster(VmwareEngineRestStub): + def __hash__(self): + return hash("GetCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.vmwareengine.GetClusterRequest): + The request object. Request message for + [VmwareEngine.GetCluster][google.cloud.vmwareengine.v1.VmwareEngine.GetCluster] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.Cluster: + A cluster in a private cloud. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster(request, metadata) + pb_request = vmwareengine.GetClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.Cluster() + pb_resp = vmwareengine_resources.Cluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster(resp) + return resp + + class _GetDnsBindPermission(VmwareEngineRestStub): + def __hash__(self): + return hash("GetDnsBindPermission") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetDnsBindPermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.DnsBindPermission: + r"""Call the get dns bind permission method over HTTP. + + Args: + request (~.vmwareengine.GetDnsBindPermissionRequest): + The request object. Request message for + [VmwareEngine.GetDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsBindPermission] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.DnsBindPermission: + DnsBindPermission resource that + contains the accounts having the + consumer DNS bind permission on the + corresponding intranet VPC of the + consumer project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dnsBindPermission}", + }, + ] + request, metadata = self._interceptor.pre_get_dns_bind_permission( + request, metadata + ) + pb_request = vmwareengine.GetDnsBindPermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.DnsBindPermission() + pb_resp = vmwareengine_resources.DnsBindPermission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dns_bind_permission(resp) + return resp + + class _GetDnsForwarding(VmwareEngineRestStub): + def __hash__(self): + return hash("GetDnsForwarding") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetDnsForwardingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.DnsForwarding: + r"""Call the get dns forwarding method over HTTP. + + Args: + request (~.vmwareengine.GetDnsForwardingRequest): + The request object. Request message for + [VmwareEngine.GetDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsForwarding] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.DnsForwarding: + DNS forwarding config. + This config defines a list of domain to + name server mappings, and is attached to + the private cloud for custom domain + resolution. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/dnsForwarding}", + }, + ] + request, metadata = self._interceptor.pre_get_dns_forwarding( + request, metadata + ) + pb_request = vmwareengine.GetDnsForwardingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.DnsForwarding() + pb_resp = vmwareengine_resources.DnsForwarding.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dns_forwarding(resp) + return resp + + class _GetExternalAccessRule(VmwareEngineRestStub): + def __hash__(self): + return hash("GetExternalAccessRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetExternalAccessRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.ExternalAccessRule: + r"""Call the get external access rule method over HTTP. + + Args: + request (~.vmwareengine.GetExternalAccessRuleRequest): + The request object. Request message for + [VmwareEngine.GetExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAccessRule] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.ExternalAccessRule: + External access firewall rules for filtering incoming + traffic destined to ``ExternalAddress`` resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/networkPolicies/*/externalAccessRules/*}", + }, + ] + request, metadata = self._interceptor.pre_get_external_access_rule( + request, metadata + ) + pb_request = vmwareengine.GetExternalAccessRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.ExternalAccessRule() + pb_resp = vmwareengine_resources.ExternalAccessRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_external_access_rule(resp) + return resp + + class _GetExternalAddress(VmwareEngineRestStub): + def __hash__(self): + return hash("GetExternalAddress") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetExternalAddressRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.ExternalAddress: + r"""Call the get external address method over HTTP. + + Args: + request (~.vmwareengine.GetExternalAddressRequest): + The request object. Request message for + [VmwareEngine.GetExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAddress] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.ExternalAddress: + Represents an allocated external IP + address and its corresponding internal + IP address in a private cloud. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/externalAddresses/*}", + }, + ] + request, metadata = self._interceptor.pre_get_external_address( + request, metadata + ) + pb_request = vmwareengine.GetExternalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.ExternalAddress() + pb_resp = vmwareengine_resources.ExternalAddress.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_external_address(resp) + return resp + + class _GetHcxActivationKey(VmwareEngineRestStub): + def __hash__(self): + return hash("GetHcxActivationKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetHcxActivationKeyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.HcxActivationKey: + r"""Call the get hcx activation key method over HTTP. + + Args: + request (~.vmwareengine.GetHcxActivationKeyRequest): + The request object. Request message for + [VmwareEngine.GetHcxActivationKeys][] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.HcxActivationKey: + HCX activation key. A default key is created during + private cloud provisioning, but this behavior is subject + to change and you should always verify active keys. Use + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + to retrieve existing keys and + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + to create new ones. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/hcxActivationKeys/*}", + }, + ] + request, metadata = self._interceptor.pre_get_hcx_activation_key( + request, metadata + ) + pb_request = vmwareengine.GetHcxActivationKeyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.HcxActivationKey() + pb_resp = vmwareengine_resources.HcxActivationKey.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_hcx_activation_key(resp) + return resp + + class _GetLoggingServer(VmwareEngineRestStub): + def __hash__(self): + return hash("GetLoggingServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetLoggingServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.LoggingServer: + r"""Call the get logging server method over HTTP. + + Args: + request (~.vmwareengine.GetLoggingServerRequest): + The request object. Request message for + [VmwareEngine.GetLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.GetLoggingServer] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.LoggingServer: + Logging server to receive vCenter or + ESXi logs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/loggingServers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_logging_server( + request, metadata + ) + pb_request = vmwareengine.GetLoggingServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.LoggingServer() + pb_resp = vmwareengine_resources.LoggingServer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_logging_server(resp) + return resp + + class _GetManagementDnsZoneBinding(VmwareEngineRestStub): + def __hash__(self): + return hash("GetManagementDnsZoneBinding") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetManagementDnsZoneBindingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.ManagementDnsZoneBinding: + r"""Call the get management dns zone + binding method over HTTP. + + Args: + request (~.vmwareengine.GetManagementDnsZoneBindingRequest): + The request object. Request message for + [VmwareEngine.GetManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.GetManagementDnsZoneBinding] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.ManagementDnsZoneBinding: + Represents a binding between a + network and the management DNS zone. A + management DNS zone is the Cloud DNS + cross-project binding zone that VMware + Engine creates for each private cloud. + It contains FQDNs and corresponding IP + addresses for the private cloud's ESXi + hosts and management VM appliances like + vCenter and NSX Manager. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_management_dns_zone_binding( + request, metadata + ) + pb_request = vmwareengine.GetManagementDnsZoneBindingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.ManagementDnsZoneBinding() + pb_resp = vmwareengine_resources.ManagementDnsZoneBinding.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_management_dns_zone_binding(resp) + return resp + + class _GetNetworkPeering(VmwareEngineRestStub): + def __hash__(self): + return hash("GetNetworkPeering") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetNetworkPeeringRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NetworkPeering: + r"""Call the get network peering method over HTTP. + + Args: + request (~.vmwareengine.GetNetworkPeeringRequest): + The request object. Request message for + [VmwareEngine.GetNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPeering] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.NetworkPeering: + Details of a network peering. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/networkPeerings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_network_peering( + request, metadata + ) + pb_request = vmwareengine.GetNetworkPeeringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.NetworkPeering() + pb_resp = vmwareengine_resources.NetworkPeering.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_network_peering(resp) + return resp + + class _GetNetworkPolicy(VmwareEngineRestStub): + def __hash__(self): + return hash("GetNetworkPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetNetworkPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NetworkPolicy: + r"""Call the get network policy method over HTTP. + + Args: + request (~.vmwareengine.GetNetworkPolicyRequest): + The request object. Request message for + [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.NetworkPolicy: + Represents a network policy resource. + Network policies are regional resources. + You can use a network policy to enable + or disable internet access and external + IP access. Network policies are + associated with a VMware Engine network, + which might span across regions. For a + given region, a network policy applies + to all private clouds in the VMware + Engine network associated with the + policy. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/networkPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_network_policy( + request, metadata + ) + pb_request = vmwareengine.GetNetworkPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.NetworkPolicy() + pb_resp = vmwareengine_resources.NetworkPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_network_policy(resp) + return resp + + class _GetNode(VmwareEngineRestStub): + def __hash__(self): + return hash("GetNode") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetNodeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.Node: + r"""Call the get node method over HTTP. + + Args: + request (~.vmwareengine.GetNodeRequest): + The request object. Request message for + [VmwareEngine.GetNode][google.cloud.vmwareengine.v1.VmwareEngine.GetNode] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.Node: + Node in a cluster. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*/nodes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_node(request, metadata) + pb_request = vmwareengine.GetNodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.Node() + pb_resp = vmwareengine_resources.Node.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_node(resp) + return resp + + class _GetNodeType(VmwareEngineRestStub): + def __hash__(self): + return hash("GetNodeType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vmwareengine.GetNodeTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.NodeType: + r"""Call the get node type method over HTTP. + + Args: + request (~.vmwareengine.GetNodeTypeRequest): + The request object. Request message for + [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.NodeType: + Describes node type. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/nodeTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_node_type(request, metadata) + pb_request = vmwareengine.GetNodeTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) - def __init__( - self, - *, - host: str = "vmwareengine.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[VmwareEngineRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + query_params["$alt"] = "json;enum-encoding=int" - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
+ # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - url_match_items = maybe_url_match.groupdict() + # Return the response + resp = vmwareengine_resources.NodeType() + pb_resp = vmwareengine_resources.NodeType.pb(resp) - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_node_type(resp) + return resp - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or VmwareEngineRestInterceptor() - self._prep_wrapped_messages(client_info) + class _GetPrivateCloud(VmwareEngineRestStub): + def __hash__(self): + return hash("GetPrivateCloud") - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ], + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict } - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", + def __call__( + self, + request: vmwareengine.GetPrivateCloudRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vmwareengine_resources.PrivateCloud: + r"""Call the get private cloud method over HTTP. + + Args: + request (~.vmwareengine.GetPrivateCloudRequest): + The request object. Request message for + [VmwareEngine.GetPrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vmwareengine_resources.PrivateCloud: + Represents a private cloud resource. Private clouds of + type ``STANDARD`` and ``TIME_LIMITED`` are zonal + resources, ``STRETCHED`` private clouds are regional. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*}", + }, + ] + request, metadata = self._interceptor.pre_get_private_cloud( + request, metadata ) + pb_request = vmwareengine.GetPrivateCloudRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) ) + query_params.update(self._get_unset_required_fields(query_params)) - # Return the client from cache. - return self._operations_client + query_params["$alt"] = "json;enum-encoding=int" - class _CreateCluster(VmwareEngineRestStub): + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vmwareengine_resources.PrivateCloud() + pb_resp = vmwareengine_resources.PrivateCloud.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_private_cloud(resp) + return resp + + class _GetPrivateConnection(VmwareEngineRestStub): def __hash__(self): - return hash("CreateCluster") + return hash("GetPrivateConnection") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "clusterId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1638,18 +6049,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.CreateClusterRequest, + request: vmwareengine.GetPrivateConnectionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create cluster method over HTTP. + ) -> vmwareengine_resources.PrivateConnection: + r"""Call the get private connection method over HTTP. Args: - request (~.vmwareengine.CreateClusterRequest): + request (~.vmwareengine.GetPrivateConnectionRequest): The request object. Request message for - [VmwareEngine.CreateCluster][google.cloud.vmwareengine.v1.VmwareEngine.CreateCluster] + [VmwareEngine.GetPrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1657,31 +6068,25 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ ~.vmwareengine_resources.PrivateConnection: + Private connection resource that + provides connectivity for VMware Engine + private clouds. """ http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters", - "body": "cluster", + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateConnections/*}", }, ] - request, metadata = self._interceptor.pre_create_cluster(request, metadata) - pb_request = vmwareengine.CreateClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + request, metadata = self._interceptor.pre_get_private_connection( + request, metadata ) + pb_request = vmwareengine.GetPrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1705,7 +6110,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1714,18 +6118,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_cluster(resp) + resp = vmwareengine_resources.PrivateConnection() + pb_resp = vmwareengine_resources.PrivateConnection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_private_connection(resp) return resp - class _CreateHcxActivationKey(VmwareEngineRestStub): + class _GetSubnet(VmwareEngineRestStub): def __hash__(self): - return 
hash("CreateHcxActivationKey") + return hash("GetSubnet") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "hcxActivationKeyId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1737,18 +6141,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.CreateHcxActivationKeyRequest, + request: vmwareengine.GetSubnetRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create hcx activation key method over HTTP. + ) -> vmwareengine_resources.Subnet: + r"""Call the get subnet method over HTTP. Args: - request (~.vmwareengine.CreateHcxActivationKeyRequest): + request (~.vmwareengine.GetSubnetRequest): The request object. Request message for - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1756,33 +6160,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.vmwareengine_resources.Subnet: + Subnet in a private cloud. Either ``management`` subnets + (such as vMotion) that are read-only, or + ``userDefined``, which can also be updated. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys", - "body": "hcx_activation_key", + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/subnets/*}", }, ] - request, metadata = self._interceptor.pre_create_hcx_activation_key( - request, metadata - ) - pb_request = vmwareengine.CreateHcxActivationKeyRequest.pb(request) + request, metadata = self._interceptor.pre_get_subnet(request, metadata) + pb_request = vmwareengine.GetSubnetRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1806,7 +6200,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1815,18 +6208,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_hcx_activation_key(resp) + resp = vmwareengine_resources.Subnet() + pb_resp = vmwareengine_resources.Subnet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_subnet(resp) return resp - class _CreateNetworkPolicy(VmwareEngineRestStub): + class _GetVmwareEngineNetwork(VmwareEngineRestStub): def __hash__(self): - return hash("CreateNetworkPolicy") + return hash("GetVmwareEngineNetwork") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "networkPolicyId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def 
_get_unset_required_fields(cls, message_dict): @@ -1838,18 +6231,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.CreateNetworkPolicyRequest, + request: vmwareengine.GetVmwareEngineNetworkRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create network policy method over HTTP. + ) -> vmwareengine_resources.VmwareEngineNetwork: + r"""Call the get vmware engine network method over HTTP. Args: - request (~.vmwareengine.CreateNetworkPolicyRequest): + request (~.vmwareengine.GetVmwareEngineNetworkRequest): The request object. Request message for - [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] + [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1857,33 +6250,25 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.vmwareengine_resources.VmwareEngineNetwork: + VMware Engine network resource that + provides connectivity for VMware Engine + private clouds. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/networkPolicies", - "body": "network_policy", + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}", }, ] - request, metadata = self._interceptor.pre_create_network_policy( + request, metadata = self._interceptor.pre_get_vmware_engine_network( request, metadata ) - pb_request = vmwareengine.CreateNetworkPolicyRequest.pb(request) + pb_request = vmwareengine.GetVmwareEngineNetworkRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1907,7 +6292,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1916,18 +6300,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_network_policy(resp) + resp = vmwareengine_resources.VmwareEngineNetwork() + pb_resp = vmwareengine_resources.VmwareEngineNetwork.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_vmware_engine_network(resp) return resp - class _CreatePrivateCloud(VmwareEngineRestStub): + class _GrantDnsBindPermission(VmwareEngineRestStub): def __hash__(self): - return hash("CreatePrivateCloud") + return hash("GrantDnsBindPermission") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "privateCloudId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} 
@classmethod def _get_unset_required_fields(cls, message_dict): @@ -1939,18 +6323,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.CreatePrivateCloudRequest, + request: vmwareengine.GrantDnsBindPermissionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create private cloud method over HTTP. + r"""Call the grant dns bind permission method over HTTP. Args: - request (~.vmwareengine.CreatePrivateCloudRequest): + request (~.vmwareengine.GrantDnsBindPermissionRequest): The request object. Request message for - [VmwareEngine.CreatePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud] + [VmwareEngine.GrantDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GrantDnsBindPermission] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1968,14 +6352,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/privateClouds", - "body": "private_cloud", + "uri": "/v1/{name=projects/*/locations/*/dnsBindPermission}:grant", + "body": "*", }, ] - request, metadata = self._interceptor.pre_create_private_cloud( + request, metadata = self._interceptor.pre_grant_dns_bind_permission( request, metadata ) - pb_request = vmwareengine.CreatePrivateCloudRequest.pb(request) + pb_request = vmwareengine.GrantDnsBindPermissionRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -2019,16 +6403,14 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_private_cloud(resp) + resp = self._interceptor.post_grant_dns_bind_permission(resp) return resp - class _CreatePrivateConnection(VmwareEngineRestStub): + class _ListClusters(VmwareEngineRestStub): def __hash__(self): - return hash("CreatePrivateConnection") + return hash("ListClusters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "privateConnectionId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2040,18 +6422,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.CreatePrivateConnectionRequest, + request: vmwareengine.ListClustersRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create private connection method over HTTP. + ) -> vmwareengine.ListClustersResponse: + r"""Call the list clusters method over HTTP. 
Args: - request (~.vmwareengine.CreatePrivateConnectionRequest): + request (~.vmwareengine.ListClustersRequest): The request object. Request message for - [VmwareEngine.CreatePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection] + [VmwareEngine.ListClusters][google.cloud.vmwareengine.v1.VmwareEngine.ListClusters] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2059,33 +6441,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.vmwareengine.ListClustersResponse: + Response message for + [VmwareEngine.ListClusters][google.cloud.vmwareengine.v1.VmwareEngine.ListClusters] """ http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/privateConnections", - "body": "private_connection", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters", }, ] - request, metadata = self._interceptor.pre_create_private_connection( - request, metadata - ) - pb_request = vmwareengine.CreatePrivateConnectionRequest.pb(request) + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = vmwareengine.ListClustersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2109,7 +6480,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2118,18 +6488,18 @@ def 
__call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_private_connection(resp) + resp = vmwareengine.ListClustersResponse() + pb_resp = vmwareengine.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) return resp - class _CreateVmwareEngineNetwork(VmwareEngineRestStub): + class _ListExternalAccessRules(VmwareEngineRestStub): def __hash__(self): - return hash("CreateVmwareEngineNetwork") + return hash("ListExternalAccessRules") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "vmwareEngineNetworkId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -2141,19 +6511,19 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.CreateVmwareEngineNetworkRequest, + request: vmwareengine.ListExternalAccessRulesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create vmware engine - network method over HTTP. + ) -> vmwareengine.ListExternalAccessRulesResponse: + r"""Call the list external access + rules method over HTTP. Args: - request (~.vmwareengine.CreateVmwareEngineNetworkRequest): + request (~.vmwareengine.ListExternalAccessRulesRequest): The request object. Request message for - [VmwareEngine.CreateVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork] + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2161,33 +6531,24 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.vmwareengine.ListExternalAccessRulesResponse: + Response message for + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] """ http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks", - "body": "vmware_engine_network", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/networkPolicies/*}/externalAccessRules", }, ] - request, metadata = self._interceptor.pre_create_vmware_engine_network( + request, metadata = self._interceptor.pre_list_external_access_rules( request, metadata ) - pb_request = vmwareengine.CreateVmwareEngineNetworkRequest.pb(request) + pb_request = vmwareengine.ListExternalAccessRulesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2211,7 +6572,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2220,14 +6580,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_vmware_engine_network(resp) + resp = vmwareengine.ListExternalAccessRulesResponse() + pb_resp = vmwareengine.ListExternalAccessRulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) + resp = self._interceptor.post_list_external_access_rules(resp) return resp - class _DeleteCluster(VmwareEngineRestStub): + class _ListExternalAddresses(VmwareEngineRestStub): def __hash__(self): - return hash("DeleteCluster") + return hash("ListExternalAddresses") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2241,18 +6603,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.DeleteClusterRequest, + request: vmwareengine.ListExternalAddressesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete cluster method over HTTP. + ) -> vmwareengine.ListExternalAddressesResponse: + r"""Call the list external addresses method over HTTP. Args: - request (~.vmwareengine.DeleteClusterRequest): + request (~.vmwareengine.ListExternalAddressesRequest): The request object. Request message for - [VmwareEngine.DeleteCluster][google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster] + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2260,21 +6622,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ ~.vmwareengine.ListExternalAddressesResponse: + Response message for + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/externalAddresses", }, ] - request, metadata = self._interceptor.pre_delete_cluster(request, metadata) - pb_request = vmwareengine.DeleteClusterRequest.pb(request) + request, metadata = self._interceptor.pre_list_external_addresses( + request, metadata + ) + pb_request = vmwareengine.ListExternalAddressesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2308,14 +6671,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_cluster(resp) + resp = vmwareengine.ListExternalAddressesResponse() + pb_resp = vmwareengine.ListExternalAddressesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_external_addresses(resp) return resp - class _DeleteNetworkPolicy(VmwareEngineRestStub): + class _ListHcxActivationKeys(VmwareEngineRestStub): def __hash__(self): - return hash("DeleteNetworkPolicy") + return hash("ListHcxActivationKeys") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2329,18 +6694,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.DeleteNetworkPolicyRequest, + request: vmwareengine.ListHcxActivationKeysRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the 
delete network policy method over HTTP. + ) -> vmwareengine.ListHcxActivationKeysResponse: + r"""Call the list hcx activation keys method over HTTP. Args: - request (~.vmwareengine.DeleteNetworkPolicyRequest): + request (~.vmwareengine.ListHcxActivationKeysRequest): The request object. Request message for - [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2348,23 +6713,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.vmwareengine.ListHcxActivationKeysResponse: + Response message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/networkPolicies/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys", }, ] - request, metadata = self._interceptor.pre_delete_network_policy( + request, metadata = self._interceptor.pre_list_hcx_activation_keys( request, metadata ) - pb_request = vmwareengine.DeleteNetworkPolicyRequest.pb(request) + pb_request = vmwareengine.ListHcxActivationKeysRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2398,14 +6762,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_network_policy(resp) + resp = 
vmwareengine.ListHcxActivationKeysResponse() + pb_resp = vmwareengine.ListHcxActivationKeysResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_hcx_activation_keys(resp) return resp - class _DeletePrivateCloud(VmwareEngineRestStub): + class _ListLoggingServers(VmwareEngineRestStub): def __hash__(self): - return hash("DeletePrivateCloud") + return hash("ListLoggingServers") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2419,18 +6785,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.DeletePrivateCloudRequest, + request: vmwareengine.ListLoggingServersRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete private cloud method over HTTP. + ) -> vmwareengine.ListLoggingServersResponse: + r"""Call the list logging servers method over HTTP. Args: - request (~.vmwareengine.DeletePrivateCloudRequest): + request (~.vmwareengine.ListLoggingServersRequest): The request object. Request message for - [VmwareEngine.DeletePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud] + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2438,23 +6804,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ ~.vmwareengine.ListLoggingServersResponse: + Response message for + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/loggingServers", }, ] - request, metadata = self._interceptor.pre_delete_private_cloud( + request, metadata = self._interceptor.pre_list_logging_servers( request, metadata ) - pb_request = vmwareengine.DeletePrivateCloudRequest.pb(request) + pb_request = vmwareengine.ListLoggingServersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2488,14 +6853,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_private_cloud(resp) + resp = vmwareengine.ListLoggingServersResponse() + pb_resp = vmwareengine.ListLoggingServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_logging_servers(resp) return resp - class _DeletePrivateConnection(VmwareEngineRestStub): + class _ListManagementDnsZoneBindings(VmwareEngineRestStub): def __hash__(self): - return hash("DeletePrivateConnection") + return hash("ListManagementDnsZoneBindings") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2509,42 +6876,42 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.DeletePrivateConnectionRequest, + request: vmwareengine.ListManagementDnsZoneBindingsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete 
private connection method over HTTP. + ) -> vmwareengine.ListManagementDnsZoneBindingsResponse: + r"""Call the list management dns zone + bindings method over HTTP. - Args: - request (~.vmwareengine.DeletePrivateConnectionRequest): - The request object. Request message for - [VmwareEngine.DeletePrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.ListManagementDnsZoneBindingsRequest): + The request object. Request message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ Returns: + ~.vmwareengine.ListManagementDnsZoneBindingsResponse: + Response message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/privateConnections/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/managementDnsZoneBindings", }, ] - request, metadata = self._interceptor.pre_delete_private_connection( + request, metadata = self._interceptor.pre_list_management_dns_zone_bindings( request, metadata ) - pb_request = vmwareengine.DeletePrivateConnectionRequest.pb(request) + pb_request = vmwareengine.ListManagementDnsZoneBindingsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2578,14 +6945,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_private_connection(resp) + resp = vmwareengine.ListManagementDnsZoneBindingsResponse() + pb_resp = vmwareengine.ListManagementDnsZoneBindingsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_management_dns_zone_bindings(resp) return resp - class _DeleteVmwareEngineNetwork(VmwareEngineRestStub): + class _ListNetworkPeerings(VmwareEngineRestStub): def __hash__(self): - return hash("DeleteVmwareEngineNetwork") + return hash("ListNetworkPeerings") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2599,43 +6968,41 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.DeleteVmwareEngineNetworkRequest, + request: vmwareengine.ListNetworkPeeringsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete vmware engine - network method over HTTP. + ) -> vmwareengine.ListNetworkPeeringsResponse: + r"""Call the list network peerings method over HTTP. - Args: - request (~.vmwareengine.DeleteVmwareEngineNetworkRequest): - The request object. Request message for - [VmwareEngine.DeleteVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.ListNetworkPeeringsRequest): + The request object. Request message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ Returns: + ~.vmwareengine.ListNetworkPeeringsResponse: + Response message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}", + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/networkPeerings", }, ] - request, metadata = self._interceptor.pre_delete_vmware_engine_network( + request, metadata = self._interceptor.pre_list_network_peerings( request, metadata ) - pb_request = vmwareengine.DeleteVmwareEngineNetworkRequest.pb(request) + pb_request = vmwareengine.ListNetworkPeeringsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2669,14 +7036,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_vmware_engine_network(resp) + resp = vmwareengine.ListNetworkPeeringsResponse() + pb_resp = vmwareengine.ListNetworkPeeringsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_network_peerings(resp) return resp - class _GetCluster(VmwareEngineRestStub): + class _ListNetworkPolicies(VmwareEngineRestStub): def __hash__(self): - return hash("GetCluster") + return hash("ListNetworkPolicies") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2690,18 +7059,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetClusterRequest, + request: vmwareengine.ListNetworkPoliciesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Cluster: - r"""Call the get cluster method over HTTP. 
+ ) -> vmwareengine.ListNetworkPoliciesResponse: + r"""Call the list network policies method over HTTP. Args: - request (~.vmwareengine.GetClusterRequest): + request (~.vmwareengine.ListNetworkPoliciesRequest): The request object. Request message for - [VmwareEngine.GetCluster][google.cloud.vmwareengine.v1.VmwareEngine.GetCluster] + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2709,18 +7078,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.Cluster: - A cluster in a private cloud. + ~.vmwareengine.ListNetworkPoliciesResponse: + Response message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}", + "uri": "/v1/{parent=projects/*/locations/*}/networkPolicies", }, ] - request, metadata = self._interceptor.pre_get_cluster(request, metadata) - pb_request = vmwareengine.GetClusterRequest.pb(request) + request, metadata = self._interceptor.pre_list_network_policies( + request, metadata + ) + pb_request = vmwareengine.ListNetworkPoliciesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2754,16 +7127,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.Cluster() - pb_resp = vmwareengine_resources.Cluster.pb(resp) + resp = vmwareengine.ListNetworkPoliciesResponse() + pb_resp = vmwareengine.ListNetworkPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_cluster(resp) + resp = self._interceptor.post_list_network_policies(resp) 
return resp - class _GetHcxActivationKey(VmwareEngineRestStub): + class _ListNodes(VmwareEngineRestStub): def __hash__(self): - return hash("GetHcxActivationKey") + return hash("ListNodes") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2777,18 +7150,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetHcxActivationKeyRequest, + request: vmwareengine.ListNodesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.HcxActivationKey: - r"""Call the get hcx activation key method over HTTP. + ) -> vmwareengine.ListNodesResponse: + r"""Call the list nodes method over HTTP. Args: - request (~.vmwareengine.GetHcxActivationKeyRequest): + request (~.vmwareengine.ListNodesRequest): The request object. Request message for - [VmwareEngine.GetHcxActivationKeys][] + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2796,27 +7169,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.HcxActivationKey: - HCX activation key. A default key is created during - private cloud provisioning, but this behavior is subject - to change and you should always verify active keys. Use - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - to retrieve existing keys and - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] - to create new ones. 
+ ~.vmwareengine.ListNodesResponse: + Response message for + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/hcxActivationKeys/*}", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*/clusters/*}/nodes", }, ] - request, metadata = self._interceptor.pre_get_hcx_activation_key( - request, metadata - ) - pb_request = vmwareengine.GetHcxActivationKeyRequest.pb(request) + request, metadata = self._interceptor.pre_list_nodes(request, metadata) + pb_request = vmwareengine.ListNodesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2850,16 +7216,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.HcxActivationKey() - pb_resp = vmwareengine_resources.HcxActivationKey.pb(resp) + resp = vmwareengine.ListNodesResponse() + pb_resp = vmwareengine.ListNodesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_hcx_activation_key(resp) + resp = self._interceptor.post_list_nodes(resp) return resp - class _GetNetworkPolicy(VmwareEngineRestStub): + class _ListNodeTypes(VmwareEngineRestStub): def __hash__(self): - return hash("GetNetworkPolicy") + return hash("ListNodeTypes") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2873,18 +7239,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetNetworkPolicyRequest, + request: vmwareengine.ListNodeTypesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.NetworkPolicy: - r"""Call the get network policy method over HTTP. 
+ ) -> vmwareengine.ListNodeTypesResponse: + r"""Call the list node types method over HTTP. Args: - request (~.vmwareengine.GetNetworkPolicyRequest): + request (~.vmwareengine.ListNodeTypesRequest): The request object. Request message for - [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2892,31 +7258,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.NetworkPolicy: - Represents a network policy resource. - Network policies are regional resources. - You can use a network policy to enable - or disable internet access and external - IP access. Network policies are - associated with a VMware Engine network, - which might span across regions. For a - given region, a network policy applies - to all private clouds in the VMware - Engine network associated with the - policy. 
+ ~.vmwareengine.ListNodeTypesResponse: + Response message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/networkPolicies/*}", + "uri": "/v1/{parent=projects/*/locations/*}/nodeTypes", }, ] - request, metadata = self._interceptor.pre_get_network_policy( - request, metadata - ) - pb_request = vmwareengine.GetNetworkPolicyRequest.pb(request) + request, metadata = self._interceptor.pre_list_node_types(request, metadata) + pb_request = vmwareengine.ListNodeTypesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -2950,16 +7305,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.NetworkPolicy() - pb_resp = vmwareengine_resources.NetworkPolicy.pb(resp) + resp = vmwareengine.ListNodeTypesResponse() + pb_resp = vmwareengine.ListNodeTypesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_network_policy(resp) + resp = self._interceptor.post_list_node_types(resp) return resp - class _GetNodeType(VmwareEngineRestStub): + class _ListPeeringRoutes(VmwareEngineRestStub): def __hash__(self): - return hash("GetNodeType") + return hash("ListPeeringRoutes") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2973,18 +7328,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetNodeTypeRequest, + request: vmwareengine.ListPeeringRoutesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.NodeType: - r"""Call the get node type method over HTTP. + ) -> vmwareengine.ListPeeringRoutesResponse: + r"""Call the list peering routes method over HTTP. 
Args: - request (~.vmwareengine.GetNodeTypeRequest): + request (~.vmwareengine.ListPeeringRoutesRequest): The request object. Request message for - [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2992,18 +7347,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.NodeType: - Describes node type. + ~.vmwareengine.ListPeeringRoutesResponse: + Response message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/nodeTypes/*}", + "uri": "/v1/{parent=projects/*/locations/*/networkPeerings/*}/peeringRoutes", }, ] - request, metadata = self._interceptor.pre_get_node_type(request, metadata) - pb_request = vmwareengine.GetNodeTypeRequest.pb(request) + request, metadata = self._interceptor.pre_list_peering_routes( + request, metadata + ) + pb_request = vmwareengine.ListPeeringRoutesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3037,16 +7396,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.NodeType() - pb_resp = vmwareengine_resources.NodeType.pb(resp) + resp = vmwareengine.ListPeeringRoutesResponse() + pb_resp = vmwareengine.ListPeeringRoutesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_node_type(resp) + resp = self._interceptor.post_list_peering_routes(resp) return resp - class _GetPrivateCloud(VmwareEngineRestStub): + class _ListPrivateClouds(VmwareEngineRestStub): def 
__hash__(self): - return hash("GetPrivateCloud") + return hash("ListPrivateClouds") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3060,18 +7419,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetPrivateCloudRequest, + request: vmwareengine.ListPrivateCloudsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.PrivateCloud: - r"""Call the get private cloud method over HTTP. + ) -> vmwareengine.ListPrivateCloudsResponse: + r"""Call the list private clouds method over HTTP. Args: - request (~.vmwareengine.GetPrivateCloudRequest): + request (~.vmwareengine.ListPrivateCloudsRequest): The request object. Request message for - [VmwareEngine.GetPrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud] + [VmwareEngine.ListPrivateClouds][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3079,22 +7438,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.PrivateCloud: - Represents a private cloud resource. - Private clouds are zonal resources. 
+ ~.vmwareengine.ListPrivateCloudsResponse: + Response message for + [VmwareEngine.ListPrivateClouds][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*}", + "uri": "/v1/{parent=projects/*/locations/*}/privateClouds", }, ] - request, metadata = self._interceptor.pre_get_private_cloud( + request, metadata = self._interceptor.pre_list_private_clouds( request, metadata ) - pb_request = vmwareengine.GetPrivateCloudRequest.pb(request) + pb_request = vmwareengine.ListPrivateCloudsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3128,16 +7487,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.PrivateCloud() - pb_resp = vmwareengine_resources.PrivateCloud.pb(resp) + resp = vmwareengine.ListPrivateCloudsResponse() + pb_resp = vmwareengine.ListPrivateCloudsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_private_cloud(resp) + resp = self._interceptor.post_list_private_clouds(resp) return resp - class _GetPrivateConnection(VmwareEngineRestStub): + class _ListPrivateConnectionPeeringRoutes(VmwareEngineRestStub): def __hash__(self): - return hash("GetPrivateConnection") + return hash("ListPrivateConnectionPeeringRoutes") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3151,42 +7510,47 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetPrivateConnectionRequest, + request: vmwareengine.ListPrivateConnectionPeeringRoutesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.PrivateConnection: - r"""Call the get private connection method over HTTP. 
+ ) -> vmwareengine.ListPrivateConnectionPeeringRoutesResponse: + r"""Call the list private connection + peering routes method over HTTP. - Args: - request (~.vmwareengine.GetPrivateConnectionRequest): - The request object. Request message for - [VmwareEngine.GetPrivateConnection][google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.ListPrivateConnectionPeeringRoutesRequest): + The request object. Request message for + [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.vmwareengine_resources.PrivateConnection: - Private connection resource that - provides connectivity for VMware Engine - private clouds. 
+ Returns: + ~.vmwareengine.ListPrivateConnectionPeeringRoutesResponse: + Response message for + [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/privateConnections/*}", + "uri": "/v1/{parent=projects/*/locations/*/privateConnections/*}/peeringRoutes", }, ] - request, metadata = self._interceptor.pre_get_private_connection( + ( + request, + metadata, + ) = self._interceptor.pre_list_private_connection_peering_routes( request, metadata ) - pb_request = vmwareengine.GetPrivateConnectionRequest.pb(request) + pb_request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3220,16 +7584,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.PrivateConnection() - pb_resp = vmwareengine_resources.PrivateConnection.pb(resp) + resp = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + pb_resp = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_private_connection(resp) + resp = self._interceptor.post_list_private_connection_peering_routes(resp) return resp - class _GetSubnet(VmwareEngineRestStub): + class _ListPrivateConnections(VmwareEngineRestStub): def __hash__(self): - return hash("GetSubnet") + return hash("ListPrivateConnections") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3243,18 +7607,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetSubnetRequest, + request: vmwareengine.ListPrivateConnectionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: 
Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Subnet: - r"""Call the get subnet method over HTTP. + ) -> vmwareengine.ListPrivateConnectionsResponse: + r"""Call the list private connections method over HTTP. Args: - request (~.vmwareengine.GetSubnetRequest): + request (~.vmwareengine.ListPrivateConnectionsRequest): The request object. Request message for - [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] + [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3262,21 +7626,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.Subnet: - Subnet in a private cloud. Either ``management`` subnets - (such as vMotion) that are read-only, or - ``userDefined``, which can also be updated. + ~.vmwareengine.ListPrivateConnectionsResponse: + Response message for + [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/subnets/*}", + "uri": "/v1/{parent=projects/*/locations/*}/privateConnections", }, ] - request, metadata = self._interceptor.pre_get_subnet(request, metadata) - pb_request = vmwareengine.GetSubnetRequest.pb(request) + request, metadata = self._interceptor.pre_list_private_connections( + request, metadata + ) + pb_request = vmwareengine.ListPrivateConnectionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3310,16 +7675,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.Subnet() - pb_resp = vmwareengine_resources.Subnet.pb(resp) + resp = 
vmwareengine.ListPrivateConnectionsResponse() + pb_resp = vmwareengine.ListPrivateConnectionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_subnet(resp) + resp = self._interceptor.post_list_private_connections(resp) return resp - class _GetVmwareEngineNetwork(VmwareEngineRestStub): + class _ListSubnets(VmwareEngineRestStub): def __hash__(self): - return hash("GetVmwareEngineNetwork") + return hash("ListSubnets") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3333,18 +7698,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.GetVmwareEngineNetworkRequest, + request: vmwareengine.ListSubnetsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.VmwareEngineNetwork: - r"""Call the get vmware engine network method over HTTP. + ) -> vmwareengine.ListSubnetsResponse: + r"""Call the list subnets method over HTTP. Args: - request (~.vmwareengine.GetVmwareEngineNetworkRequest): + request (~.vmwareengine.ListSubnetsRequest): The request object. Request message for - [VmwareEngine.GetVmwareEngineNetwork][google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork] + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3352,23 +7717,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.VmwareEngineNetwork: - VMware Engine network resource that - provides connectivity for VMware Engine - private clouds. 
+ ~.vmwareengine.ListSubnetsResponse: + Response message for + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}", + "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/subnets", }, ] - request, metadata = self._interceptor.pre_get_vmware_engine_network( - request, metadata - ) - pb_request = vmwareengine.GetVmwareEngineNetworkRequest.pb(request) + request, metadata = self._interceptor.pre_list_subnets(request, metadata) + pb_request = vmwareengine.ListSubnetsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3402,16 +7764,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.VmwareEngineNetwork() - pb_resp = vmwareengine_resources.VmwareEngineNetwork.pb(resp) + resp = vmwareengine.ListSubnetsResponse() + pb_resp = vmwareengine.ListSubnetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_vmware_engine_network(resp) + resp = self._interceptor.post_list_subnets(resp) return resp - class _ListClusters(VmwareEngineRestStub): + class _ListVmwareEngineNetworks(VmwareEngineRestStub): def __hash__(self): - return hash("ListClusters") + return hash("ListVmwareEngineNetworks") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3425,39 +7787,42 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ListClustersRequest, + request: vmwareengine.ListVmwareEngineNetworksRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListClustersResponse: - r"""Call the list clusters method over HTTP. 
+ ) -> vmwareengine.ListVmwareEngineNetworksResponse: + r"""Call the list vmware engine + networks method over HTTP. - Args: - request (~.vmwareengine.ListClustersRequest): - The request object. Request message for - [VmwareEngine.ListClusters][google.cloud.vmwareengine.v1.VmwareEngine.ListClusters] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.ListVmwareEngineNetworksRequest): + The request object. Request message for + [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
- Returns: - ~.vmwareengine.ListClustersResponse: - Response message for - [VmwareEngine.ListClusters][google.cloud.vmwareengine.v1.VmwareEngine.ListClusters] + Returns: + ~.vmwareengine.ListVmwareEngineNetworksResponse: + Response message for + [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters", + "uri": "/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks", }, ] - request, metadata = self._interceptor.pre_list_clusters(request, metadata) - pb_request = vmwareengine.ListClustersRequest.pb(request) + request, metadata = self._interceptor.pre_list_vmware_engine_networks( + request, metadata + ) + pb_request = vmwareengine.ListVmwareEngineNetworksRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3491,16 +7856,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListClustersResponse() - pb_resp = vmwareengine.ListClustersResponse.pb(resp) + resp = vmwareengine.ListVmwareEngineNetworksResponse() + pb_resp = vmwareengine.ListVmwareEngineNetworksResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_clusters(resp) + resp = self._interceptor.post_list_vmware_engine_networks(resp) return resp - class _ListHcxActivationKeys(VmwareEngineRestStub): + class _RepairManagementDnsZoneBinding(VmwareEngineRestStub): def __hash__(self): - return hash("ListHcxActivationKeys") + return hash("RepairManagementDnsZoneBinding") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3514,43 +7879,56 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ListHcxActivationKeysRequest, + request: vmwareengine.RepairManagementDnsZoneBindingRequest, 
*, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListHcxActivationKeysResponse: - r"""Call the list hcx activation keys method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the repair management dns + zone binding method over HTTP. - Args: - request (~.vmwareengine.ListHcxActivationKeysRequest): - The request object. Request message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.RepairManagementDnsZoneBindingRequest): + The request object. Request message for + [VmwareEngine.RepairManagementDnsZoneBindings][] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.vmwareengine.ListHcxActivationKeysResponse: - Response message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys", + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}:repair", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_hcx_activation_keys( + ( + request, + metadata, + ) = self._interceptor.pre_repair_management_dns_zone_binding( request, metadata ) - pb_request = vmwareengine.ListHcxActivationKeysRequest.pb(request) + pb_request = vmwareengine.RepairManagementDnsZoneBindingRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3574,6 +7952,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3582,16 +7961,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListHcxActivationKeysResponse() - pb_resp = vmwareengine.ListHcxActivationKeysResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_hcx_activation_keys(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_repair_management_dns_zone_binding(resp) return resp - class _ListNetworkPolicies(VmwareEngineRestStub): + class _ResetNsxCredentials(VmwareEngineRestStub): def __hash__(self): - return hash("ListNetworkPolicies") + return hash("ResetNsxCredentials") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3605,18 +7982,18 
@@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ListNetworkPoliciesRequest, + request: vmwareengine.ResetNsxCredentialsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListNetworkPoliciesResponse: - r"""Call the list network policies method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the reset nsx credentials method over HTTP. Args: - request (~.vmwareengine.ListNetworkPoliciesRequest): + request (~.vmwareengine.ResetNsxCredentialsRequest): The request object. Request message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3624,24 +8001,33 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.ListNetworkPoliciesResponse: - Response message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/networkPolicies", + "method": "post", + "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetNsxCredentials", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_network_policies( + request, metadata = self._interceptor.pre_reset_nsx_credentials( request, metadata ) - pb_request = vmwareengine.ListNetworkPoliciesRequest.pb(request) + pb_request = vmwareengine.ResetNsxCredentialsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3665,6 +8051,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3673,16 +8060,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListNetworkPoliciesResponse() - pb_resp = vmwareengine.ListNetworkPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_network_policies(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset_nsx_credentials(resp) return resp - class _ListNodeTypes(VmwareEngineRestStub): + class _ResetVcenterCredentials(VmwareEngineRestStub): def __hash__(self): - return hash("ListNodeTypes") + return hash("ResetVcenterCredentials") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3696,18 +8081,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: 
vmwareengine.ListNodeTypesRequest, + request: vmwareengine.ResetVcenterCredentialsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListNodeTypesResponse: - r"""Call the list node types method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the reset vcenter credentials method over HTTP. Args: - request (~.vmwareengine.ListNodeTypesRequest): + request (~.vmwareengine.ResetVcenterCredentialsRequest): The request object. Request message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3715,22 +8100,33 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.ListNodeTypesResponse: - Response message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/nodeTypes", + "method": "post", + "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetVcenterCredentials", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_node_types(request, metadata) - pb_request = vmwareengine.ListNodeTypesRequest.pb(request) + request, metadata = self._interceptor.pre_reset_vcenter_credentials( + request, metadata + ) + pb_request = vmwareengine.ResetVcenterCredentialsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3754,6 +8150,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3762,16 +8159,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListNodeTypesResponse() - pb_resp = vmwareengine.ListNodeTypesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_node_types(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset_vcenter_credentials(resp) return resp - class _ListPrivateClouds(VmwareEngineRestStub): + class _RevokeDnsBindPermission(VmwareEngineRestStub): def __hash__(self): - return hash("ListPrivateClouds") + return hash("RevokeDnsBindPermission") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3785,43 +8180,53 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - 
request: vmwareengine.ListPrivateCloudsRequest, + request: vmwareengine.RevokeDnsBindPermissionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListPrivateCloudsResponse: - r"""Call the list private clouds method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the revoke dns bind + permission method over HTTP. - Args: - request (~.vmwareengine.ListPrivateCloudsRequest): - The request object. Request message for - [VmwareEngine.ListPrivateClouds][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.RevokeDnsBindPermissionRequest): + The request object. Request message for + [VmwareEngine.RevokeDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.RevokeDnsBindPermission] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.vmwareengine.ListPrivateCloudsResponse: - Response message for - [VmwareEngine.ListPrivateClouds][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds] + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/privateClouds", + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dnsBindPermission}:revoke", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_private_clouds( + request, metadata = self._interceptor.pre_revoke_dns_bind_permission( request, metadata ) - pb_request = vmwareengine.ListPrivateCloudsRequest.pb(request) + pb_request = vmwareengine.RevokeDnsBindPermissionRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -3845,6 +8250,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3853,16 +8259,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListPrivateCloudsResponse() - pb_resp = vmwareengine.ListPrivateCloudsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_private_clouds(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_revoke_dns_bind_permission(resp) return resp - class _ListPrivateConnectionPeeringRoutes(VmwareEngineRestStub): + class _ShowNsxCredentials(VmwareEngineRestStub): def __hash__(self): - return hash("ListPrivateConnectionPeeringRoutes") + return hash("ShowNsxCredentials") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3876,47 +8280,39 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, 
- request: vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + request: vmwareengine.ShowNsxCredentialsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListPrivateConnectionPeeringRoutesResponse: - r"""Call the list private connection - peering routes method over HTTP. - - Args: - request (~.vmwareengine.ListPrivateConnectionPeeringRoutesRequest): - The request object. Request message for - [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + ) -> vmwareengine_resources.Credentials: + r"""Call the show nsx credentials method over HTTP. - Returns: - ~.vmwareengine.ListPrivateConnectionPeeringRoutesResponse: - Response message for - [VmwareEngine.ListPrivateConnectionPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes] + Args: + request (~.vmwareengine.ShowNsxCredentialsRequest): + The request object. Request message for + [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.vmwareengine_resources.Credentials: + Credentials for a private cloud. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/privateConnections/*}/peeringRoutes", + "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showNsxCredentials", }, ] - ( - request, - metadata, - ) = self._interceptor.pre_list_private_connection_peering_routes( + request, metadata = self._interceptor.pre_show_nsx_credentials( request, metadata ) - pb_request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest.pb( - request - ) + pb_request = vmwareengine.ShowNsxCredentialsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -3950,16 +8346,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() - pb_resp = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb(resp) + resp = vmwareengine_resources.Credentials() + pb_resp = vmwareengine_resources.Credentials.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_private_connection_peering_routes(resp) + resp = self._interceptor.post_show_nsx_credentials(resp) return resp - class _ListPrivateConnections(VmwareEngineRestStub): + class _ShowVcenterCredentials(VmwareEngineRestStub): def __hash__(self): - return hash("ListPrivateConnections") + return hash("ShowVcenterCredentials") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -3973,18 +8369,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ListPrivateConnectionsRequest, + request: vmwareengine.ShowVcenterCredentialsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListPrivateConnectionsResponse: - r"""Call the list private connections method over HTTP. 
+ ) -> vmwareengine_resources.Credentials: + r"""Call the show vcenter credentials method over HTTP. Args: - request (~.vmwareengine.ListPrivateConnectionsRequest): + request (~.vmwareengine.ShowVcenterCredentialsRequest): The request object. Request message for - [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] + [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3992,22 +8388,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.ListPrivateConnectionsResponse: - Response message for - [VmwareEngine.ListPrivateConnections][google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections] - + ~.vmwareengine_resources.Credentials: + Credentials for a private cloud. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/privateConnections", + "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showVcenterCredentials", }, ] - request, metadata = self._interceptor.pre_list_private_connections( + request, metadata = self._interceptor.pre_show_vcenter_credentials( request, metadata ) - pb_request = vmwareengine.ListPrivateConnectionsRequest.pb(request) + pb_request = vmwareengine.ShowVcenterCredentialsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -4041,16 +8435,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListPrivateConnectionsResponse() - pb_resp = vmwareengine.ListPrivateConnectionsResponse.pb(resp) + resp = vmwareengine_resources.Credentials() + pb_resp = vmwareengine_resources.Credentials.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - 
resp = self._interceptor.post_list_private_connections(resp) + resp = self._interceptor.post_show_vcenter_credentials(resp) return resp - class _ListSubnets(VmwareEngineRestStub): + class _UndeletePrivateCloud(VmwareEngineRestStub): def __hash__(self): - return hash("ListSubnets") + return hash("UndeletePrivateCloud") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -4064,18 +8458,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ListSubnetsRequest, + request: vmwareengine.UndeletePrivateCloudRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListSubnetsResponse: - r"""Call the list subnets method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the undelete private cloud method over HTTP. Args: - request (~.vmwareengine.ListSubnetsRequest): + request (~.vmwareengine.UndeletePrivateCloudRequest): The request object. Request message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + [VmwareEngine.UndeletePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4083,22 +8477,33 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine.ListSubnetsResponse: - Response message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*/privateClouds/*}/subnets", + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/privateClouds/*}:undelete", + "body": "*", }, - ] - request, metadata = self._interceptor.pre_list_subnets(request, metadata) - pb_request = vmwareengine.ListSubnetsRequest.pb(request) + ] + request, metadata = self._interceptor.pre_undelete_private_cloud( + request, metadata + ) + pb_request = vmwareengine.UndeletePrivateCloudRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -4122,6 +8527,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4130,18 +8536,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListSubnetsResponse() - pb_resp = vmwareengine.ListSubnetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_subnets(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_undelete_private_cloud(resp) return resp - class _ListVmwareEngineNetworks(VmwareEngineRestStub): + class _UpdateCluster(VmwareEngineRestStub): def __hash__(self): - return hash("ListVmwareEngineNetworks") + return hash("UpdateCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, 
message_dict): @@ -4153,44 +8559,50 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ListVmwareEngineNetworksRequest, + request: vmwareengine.UpdateClusterRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine.ListVmwareEngineNetworksResponse: - r"""Call the list vmware engine - networks method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update cluster method over HTTP. - Args: - request (~.vmwareengine.ListVmwareEngineNetworksRequest): - The request object. Request message for - [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.UpdateClusterRequest): + The request object. Request message for + [VmwareEngine.UpdateCluster][google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.vmwareengine.ListVmwareEngineNetworksResponse: - Response message for - [VmwareEngine.ListVmwareEngineNetworks][google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks] + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks", + "method": "patch", + "uri": "/v1/{cluster.name=projects/*/locations/*/privateClouds/*/clusters/*}", + "body": "cluster", }, ] - request, metadata = self._interceptor.pre_list_vmware_engine_networks( - request, metadata - ) - pb_request = vmwareengine.ListVmwareEngineNetworksRequest.pb(request) + request, metadata = self._interceptor.pre_update_cluster(request, metadata) + pb_request = vmwareengine.UpdateClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -4214,6 +8626,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4222,18 +8635,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine.ListVmwareEngineNetworksResponse() - pb_resp = vmwareengine.ListVmwareEngineNetworksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_vmware_engine_networks(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cluster(resp) return resp - class _ResetNsxCredentials(VmwareEngineRestStub): + class _UpdateDnsForwarding(VmwareEngineRestStub): def __hash__(self): - return hash("ResetNsxCredentials") + return hash("UpdateDnsForwarding") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": 
{}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4245,18 +8658,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ResetNsxCredentialsRequest, + request: vmwareengine.UpdateDnsForwardingRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the reset nsx credentials method over HTTP. + r"""Call the update dns forwarding method over HTTP. Args: - request (~.vmwareengine.ResetNsxCredentialsRequest): + request (~.vmwareengine.UpdateDnsForwardingRequest): The request object. Request message for - [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] + [VmwareEngine.UpdateDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateDnsForwarding] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -4273,15 +8686,15 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetNsxCredentials", - "body": "*", + "method": "patch", + "uri": "/v1/{dns_forwarding.name=projects/*/locations/*/privateClouds/*/dnsForwarding}", + "body": "dns_forwarding", }, ] - request, metadata = self._interceptor.pre_reset_nsx_credentials( + request, metadata = self._interceptor.pre_update_dns_forwarding( request, metadata ) - pb_request = vmwareengine.ResetNsxCredentialsRequest.pb(request) + pb_request = vmwareengine.UpdateDnsForwardingRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4325,14 +8738,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reset_nsx_credentials(resp) + resp = self._interceptor.post_update_dns_forwarding(resp) return resp - class _ResetVcenterCredentials(VmwareEngineRestStub): + class _UpdateExternalAccessRule(VmwareEngineRestStub): def __hash__(self): - return hash("ResetVcenterCredentials") + return hash("UpdateExternalAccessRule") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4344,43 +8759,44 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ResetVcenterCredentialsRequest, + request: vmwareengine.UpdateExternalAccessRuleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the reset vcenter credentials method over HTTP. + r"""Call the update external access + rule method over HTTP. 
- Args: - request (~.vmwareengine.ResetVcenterCredentialsRequest): - The request object. Request message for - [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.vmwareengine.UpdateExternalAccessRuleRequest): + The request object. Request message for + [VmwareEngine.UpdateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAccessRule] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetVcenterCredentials", - "body": "*", + "method": "patch", + "uri": "/v1/{external_access_rule.name=projects/*/locations/*/networkPolicies/*/externalAccessRules/*}", + "body": "external_access_rule", }, ] - request, metadata = self._interceptor.pre_reset_vcenter_credentials( + request, metadata = self._interceptor.pre_update_external_access_rule( request, metadata ) - pb_request = vmwareengine.ResetVcenterCredentialsRequest.pb(request) + pb_request = vmwareengine.UpdateExternalAccessRuleRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4424,14 +8840,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reset_vcenter_credentials(resp) + resp = self._interceptor.post_update_external_access_rule(resp) return resp - class _ShowNsxCredentials(VmwareEngineRestStub): + class _UpdateExternalAddress(VmwareEngineRestStub): def __hash__(self): - return hash("ShowNsxCredentials") + return hash("UpdateExternalAddress") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4443,18 +8861,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ShowNsxCredentialsRequest, + request: vmwareengine.UpdateExternalAddressRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Credentials: - r"""Call the show nsx credentials method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update external address method over HTTP. 
Args: - request (~.vmwareengine.ShowNsxCredentialsRequest): + request (~.vmwareengine.UpdateExternalAddressRequest): The request object. Request message for - [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] + [VmwareEngine.UpdateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAddress] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4462,22 +8880,33 @@ def __call__( sent along with the request as metadata. Returns: - ~.vmwareengine_resources.Credentials: - Credentials for a private cloud. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showNsxCredentials", + "method": "patch", + "uri": "/v1/{external_address.name=projects/*/locations/*/privateClouds/*/externalAddresses/*}", + "body": "external_address", }, ] - request, metadata = self._interceptor.pre_show_nsx_credentials( + request, metadata = self._interceptor.pre_update_external_address( request, metadata ) - pb_request = vmwareengine.ShowNsxCredentialsRequest.pb(request) + pb_request = vmwareengine.UpdateExternalAddressRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -4501,6 +8930,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4509,18 +8939,18 @@ def __call__( raise 
core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.Credentials() - pb_resp = vmwareengine_resources.Credentials.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_show_nsx_credentials(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_external_address(resp) return resp - class _ShowVcenterCredentials(VmwareEngineRestStub): + class _UpdateLoggingServer(VmwareEngineRestStub): def __hash__(self): - return hash("ShowVcenterCredentials") + return hash("UpdateLoggingServer") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4532,18 +8962,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.ShowVcenterCredentialsRequest, + request: vmwareengine.UpdateLoggingServerRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> vmwareengine_resources.Credentials: - r"""Call the show vcenter credentials method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update logging server method over HTTP. Args: - request (~.vmwareengine.ShowVcenterCredentialsRequest): + request (~.vmwareengine.UpdateLoggingServerRequest): The request object. Request message for - [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] + [VmwareEngine.UpdateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.UpdateLoggingServer] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4551,22 +8981,33 @@ def __call__( sent along with the request as metadata. 
Returns: - ~.vmwareengine_resources.Credentials: - Credentials for a private cloud. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showVcenterCredentials", + "method": "patch", + "uri": "/v1/{logging_server.name=projects/*/locations/*/privateClouds/*/loggingServers/*}", + "body": "logging_server", }, ] - request, metadata = self._interceptor.pre_show_vcenter_credentials( + request, metadata = self._interceptor.pre_update_logging_server( request, metadata ) - pb_request = vmwareengine.ShowVcenterCredentialsRequest.pb(request) + pb_request = vmwareengine.UpdateLoggingServerRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -4590,6 +9031,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4598,18 +9040,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = vmwareengine_resources.Credentials() - pb_resp = vmwareengine_resources.Credentials.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_show_vcenter_credentials(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_logging_server(resp) return resp - class _UndeletePrivateCloud(VmwareEngineRestStub): + class 
_UpdateManagementDnsZoneBinding(VmwareEngineRestStub): def __hash__(self): - return hash("UndeletePrivateCloud") + return hash("UpdateManagementDnsZoneBinding") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -4621,43 +9063,47 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.UndeletePrivateCloudRequest, + request: vmwareengine.UpdateManagementDnsZoneBindingRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the undelete private cloud method over HTTP. - - Args: - request (~.vmwareengine.UndeletePrivateCloudRequest): - The request object. Request message for - [VmwareEngine.UndeletePrivateCloud][google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + r"""Call the update management dns + zone binding method over HTTP. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Args: + request (~.vmwareengine.UpdateManagementDnsZoneBindingRequest): + The request object. Request message for + [VmwareEngine.UpdateManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateManagementDnsZoneBinding] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/privateClouds/*}:undelete", - "body": "*", + "method": "patch", + "uri": "/v1/{management_dns_zone_binding.name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}", + "body": "management_dns_zone_binding", }, ] - request, metadata = self._interceptor.pre_undelete_private_cloud( + ( + request, + metadata, + ) = self._interceptor.pre_update_management_dns_zone_binding( request, metadata ) - pb_request = vmwareengine.UndeletePrivateCloudRequest.pb(request) + pb_request = vmwareengine.UpdateManagementDnsZoneBindingRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4701,12 +9147,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_undelete_private_cloud(resp) + resp = self._interceptor.post_update_management_dns_zone_binding(resp) return resp - class _UpdateCluster(VmwareEngineRestStub): + class _UpdateNetworkPeering(VmwareEngineRestStub): def __hash__(self): - return hash("UpdateCluster") + return hash("UpdateNetworkPeering") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, @@ -4722,18 +9168,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: vmwareengine.UpdateClusterRequest, + request: vmwareengine.UpdateNetworkPeeringRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the update cluster method over HTTP. + r"""Call the update network peering method over HTTP. 
Args: - request (~.vmwareengine.UpdateClusterRequest): + request (~.vmwareengine.UpdateNetworkPeeringRequest): The request object. Request message for - [VmwareEngine.UpdateCluster][google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster] + [VmwareEngine.UpdateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPeering] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4751,12 +9197,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "patch", - "uri": "/v1/{cluster.name=projects/*/locations/*/privateClouds/*/clusters/*}", - "body": "cluster", + "uri": "/v1/{network_peering.name=projects/*/locations/*/networkPeerings/*}", + "body": "network_peering", }, ] - request, metadata = self._interceptor.pre_update_cluster(request, metadata) - pb_request = vmwareengine.UpdateClusterRequest.pb(request) + request, metadata = self._interceptor.pre_update_network_peering( + request, metadata + ) + pb_request = vmwareengine.UpdateNetworkPeeringRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -4800,7 +9248,7 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_cluster(resp) + resp = self._interceptor.post_update_network_peering(resp) return resp class _UpdateNetworkPolicy(VmwareEngineRestStub): @@ -5315,6 +9763,26 @@ def create_cluster( # In C++ this would require a dynamic_cast return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def create_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAccessRuleRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateExternalAccessRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_external_address( + self, + ) -> Callable[ + [vmwareengine.CreateExternalAddressRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateExternalAddress(self._session, self._host, self._interceptor) # type: ignore + @property def create_hcx_activation_key( self, @@ -5325,6 +9793,32 @@ def create_hcx_activation_key( # In C++ this would require a dynamic_cast return self._CreateHcxActivationKey(self._session, self._host, self._interceptor) # type: ignore + @property + def create_logging_server( + self, + ) -> Callable[[vmwareengine.CreateLoggingServerRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateLoggingServer(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.CreateManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateManagementDnsZoneBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_network_peering( + self, + ) -> Callable[[vmwareengine.CreateNetworkPeeringRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateNetworkPeering(self._session, self._host, self._interceptor) # type: ignore + @property def create_network_policy( self, @@ -5369,6 +9863,52 @@ def delete_cluster( # In C++ this would require a dynamic_cast return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAccessRuleRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteExternalAccessRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_external_address( + self, + ) -> Callable[ + [vmwareengine.DeleteExternalAddressRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteExternalAddress(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_logging_server( + self, + ) -> Callable[[vmwareengine.DeleteLoggingServerRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteLoggingServer(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.DeleteManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteManagementDnsZoneBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_network_peering( + self, + ) -> Callable[[vmwareengine.DeleteNetworkPeeringRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteNetworkPeering(self._session, self._host, self._interceptor) # type: ignore + @property def delete_network_policy( self, @@ -5405,6 +9945,17 @@ def delete_vmware_engine_network( # In C++ this would require a dynamic_cast return self._DeleteVmwareEngineNetwork(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_network_policy_external_addresses( + self, + ) -> Callable[ + [vmwareengine.FetchNetworkPolicyExternalAddressesRequest], + vmwareengine.FetchNetworkPolicyExternalAddressesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchNetworkPolicyExternalAddresses(self._session, self._host, self._interceptor) # type: ignore + @property def get_cluster( self, @@ -5413,6 +9964,48 @@ def get_cluster( # In C++ this would require a dynamic_cast return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def get_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GetDnsBindPermissionRequest], + vmwareengine_resources.DnsBindPermission, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDnsBindPermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dns_forwarding( + self, + ) -> Callable[ + [vmwareengine.GetDnsForwardingRequest], vmwareengine_resources.DnsForwarding + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDnsForwarding(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.GetExternalAccessRuleRequest], + vmwareengine_resources.ExternalAccessRule, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetExternalAccessRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_external_address( + self, + ) -> Callable[ + [vmwareengine.GetExternalAddressRequest], vmwareengine_resources.ExternalAddress + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetExternalAddress(self._session, self._host, self._interceptor) # type: ignore + @property def get_hcx_activation_key( self, @@ -5424,6 +10017,37 @@ def get_hcx_activation_key( # In C++ this would require a dynamic_cast return self._GetHcxActivationKey(self._session, self._host, self._interceptor) # type: ignore + @property + def get_logging_server( + self, + ) -> Callable[ + [vmwareengine.GetLoggingServerRequest], vmwareengine_resources.LoggingServer + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetLoggingServer(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.GetManagementDnsZoneBindingRequest], + vmwareengine_resources.ManagementDnsZoneBinding, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetManagementDnsZoneBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_network_peering( + self, + ) -> Callable[ + [vmwareengine.GetNetworkPeeringRequest], vmwareengine_resources.NetworkPeering + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetNetworkPeering(self._session, self._host, self._interceptor) # type: ignore + @property def get_network_policy( self, @@ -5434,6 +10058,14 @@ def get_network_policy( # In C++ this would require a dynamic_cast return self._GetNetworkPolicy(self._session, self._host, self._interceptor) # type: ignore + @property + def get_node( + self, + ) -> Callable[[vmwareengine.GetNodeRequest], vmwareengine_resources.Node]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetNode(self._session, self._host, self._interceptor) # type: ignore + @property def get_node_type( self, @@ -5482,6 +10114,16 @@ def get_vmware_engine_network( # In C++ this would require a dynamic_cast return self._GetVmwareEngineNetwork(self._session, self._host, self._interceptor) # type: ignore + @property + def grant_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.GrantDnsBindPermissionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GrantDnsBindPermission(self._session, self._host, self._interceptor) # type: ignore + @property def list_clusters( self, @@ -5492,6 +10134,28 @@ def list_clusters( # In C++ this would require a dynamic_cast return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + @property + def list_external_access_rules( + self, + ) -> Callable[ + [vmwareengine.ListExternalAccessRulesRequest], + vmwareengine.ListExternalAccessRulesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListExternalAccessRules(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_external_addresses( + self, + ) -> Callable[ + [vmwareengine.ListExternalAddressesRequest], + vmwareengine.ListExternalAddressesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListExternalAddresses(self._session, self._host, self._interceptor) # type: ignore + @property def list_hcx_activation_keys( self, @@ -5503,6 +10167,39 @@ def list_hcx_activation_keys( # In C++ this would require a dynamic_cast return self._ListHcxActivationKeys(self._session, self._host, self._interceptor) # type: ignore + @property + def list_logging_servers( + self, + ) -> Callable[ + [vmwareengine.ListLoggingServersRequest], + vmwareengine.ListLoggingServersResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLoggingServers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_management_dns_zone_bindings( + self, + ) -> Callable[ + [vmwareengine.ListManagementDnsZoneBindingsRequest], + vmwareengine.ListManagementDnsZoneBindingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListManagementDnsZoneBindings(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_network_peerings( + self, + ) -> Callable[ + [vmwareengine.ListNetworkPeeringsRequest], + vmwareengine.ListNetworkPeeringsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListNetworkPeerings(self._session, self._host, self._interceptor) # type: ignore + @property def list_network_policies( self, @@ -5514,6 +10211,14 @@ def list_network_policies( # In C++ this would require a dynamic_cast return self._ListNetworkPolicies(self._session, self._host, self._interceptor) # type: ignore + @property + def list_nodes( + self, + ) -> Callable[[vmwareengine.ListNodesRequest], vmwareengine.ListNodesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNodes(self._session, self._host, self._interceptor) # type: ignore + @property def list_node_types( self, @@ -5524,6 +10229,16 @@ def list_node_types( # In C++ this would require a dynamic_cast return self._ListNodeTypes(self._session, self._host, self._interceptor) # type: ignore + @property + def list_peering_routes( + self, + ) -> Callable[ + [vmwareengine.ListPeeringRoutesRequest], vmwareengine.ListPeeringRoutesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPeeringRoutes(self._session, self._host, self._interceptor) # type: ignore + @property def list_private_clouds( self, @@ -5575,6 +10290,16 @@ def list_vmware_engine_networks( # In C++ this would require a dynamic_cast return self._ListVmwareEngineNetworks(self._session, self._host, self._interceptor) # type: ignore + @property + def repair_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.RepairManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RepairManagementDnsZoneBinding(self._session, self._host, self._interceptor) # type: ignore + @property def reset_nsx_credentials( self, @@ -5593,6 +10318,16 @@ def reset_vcenter_credentials( # In C++ this would require a dynamic_cast return self._ResetVcenterCredentials(self._session, self._host, self._interceptor) # type: ignore + @property + def revoke_dns_bind_permission( + self, + ) -> Callable[ + [vmwareengine.RevokeDnsBindPermissionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RevokeDnsBindPermission(self._session, self._host, self._interceptor) # type: ignore + @property def show_nsx_credentials( self, @@ -5629,6 +10364,60 @@ def update_cluster( # In C++ this would require a dynamic_cast return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def update_dns_forwarding( + self, + ) -> Callable[[vmwareengine.UpdateDnsForwardingRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDnsForwarding(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_external_access_rule( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAccessRuleRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateExternalAccessRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_external_address( + self, + ) -> Callable[ + [vmwareengine.UpdateExternalAddressRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateExternalAddress(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_logging_server( + self, + ) -> Callable[[vmwareengine.UpdateLoggingServerRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateLoggingServer(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_management_dns_zone_binding( + self, + ) -> Callable[ + [vmwareengine.UpdateManagementDnsZoneBindingRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateManagementDnsZoneBinding(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_network_peering( + self, + ) -> Callable[[vmwareengine.UpdateNetworkPeeringRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateNetworkPeering(self._session, self._host, self._interceptor) # type: ignore + @property def update_network_policy( self, diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py index ea61a577871a..b12bc0aa1106 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/__init__.py @@ -15,32 +15,67 @@ # from .vmwareengine import ( CreateClusterRequest, + CreateExternalAccessRuleRequest, + CreateExternalAddressRequest, CreateHcxActivationKeyRequest, + CreateLoggingServerRequest, + CreateManagementDnsZoneBindingRequest, + CreateNetworkPeeringRequest, CreateNetworkPolicyRequest, CreatePrivateCloudRequest, CreatePrivateConnectionRequest, CreateVmwareEngineNetworkRequest, DeleteClusterRequest, + DeleteExternalAccessRuleRequest, + DeleteExternalAddressRequest, + DeleteLoggingServerRequest, + DeleteManagementDnsZoneBindingRequest, + DeleteNetworkPeeringRequest, DeleteNetworkPolicyRequest, DeletePrivateCloudRequest, DeletePrivateConnectionRequest, DeleteVmwareEngineNetworkRequest, + FetchNetworkPolicyExternalAddressesRequest, + FetchNetworkPolicyExternalAddressesResponse, GetClusterRequest, + GetDnsBindPermissionRequest, + GetDnsForwardingRequest, + GetExternalAccessRuleRequest, + GetExternalAddressRequest, GetHcxActivationKeyRequest, + GetLoggingServerRequest, + GetManagementDnsZoneBindingRequest, + GetNetworkPeeringRequest, GetNetworkPolicyRequest, + GetNodeRequest, GetNodeTypeRequest, GetPrivateCloudRequest, GetPrivateConnectionRequest, GetSubnetRequest, GetVmwareEngineNetworkRequest, + GrantDnsBindPermissionRequest, ListClustersRequest, ListClustersResponse, + ListExternalAccessRulesRequest, + ListExternalAccessRulesResponse, + ListExternalAddressesRequest, + 
ListExternalAddressesResponse, ListHcxActivationKeysRequest, ListHcxActivationKeysResponse, + ListLoggingServersRequest, + ListLoggingServersResponse, + ListManagementDnsZoneBindingsRequest, + ListManagementDnsZoneBindingsResponse, + ListNetworkPeeringsRequest, + ListNetworkPeeringsResponse, ListNetworkPoliciesRequest, ListNetworkPoliciesResponse, + ListNodesRequest, + ListNodesResponse, ListNodeTypesRequest, ListNodeTypesResponse, + ListPeeringRoutesRequest, + ListPeeringRoutesResponse, ListPrivateCloudsRequest, ListPrivateCloudsResponse, ListPrivateConnectionPeeringRoutesRequest, @@ -52,12 +87,20 @@ ListVmwareEngineNetworksRequest, ListVmwareEngineNetworksResponse, OperationMetadata, + RepairManagementDnsZoneBindingRequest, ResetNsxCredentialsRequest, ResetVcenterCredentialsRequest, + RevokeDnsBindPermissionRequest, ShowNsxCredentialsRequest, ShowVcenterCredentialsRequest, UndeletePrivateCloudRequest, UpdateClusterRequest, + UpdateDnsForwardingRequest, + UpdateExternalAccessRuleRequest, + UpdateExternalAddressRequest, + UpdateLoggingServerRequest, + UpdateManagementDnsZoneBindingRequest, + UpdateNetworkPeeringRequest, UpdateNetworkPolicyRequest, UpdatePrivateCloudRequest, UpdatePrivateConnectionRequest, @@ -67,16 +110,27 @@ from .vmwareengine_resources import ( Cluster, Credentials, + DnsBindPermission, + DnsForwarding, + ExternalAccessRule, + ExternalAddress, Hcx, HcxActivationKey, + LocationMetadata, + LoggingServer, + ManagementDnsZoneBinding, NetworkConfig, + NetworkPeering, NetworkPolicy, + Node, NodeType, NodeTypeConfig, Nsx, PeeringRoute, + Principal, PrivateCloud, PrivateConnection, + StretchedClusterConfig, Subnet, Vcenter, VmwareEngineNetwork, @@ -84,32 +138,67 @@ __all__ = ( "CreateClusterRequest", + "CreateExternalAccessRuleRequest", + "CreateExternalAddressRequest", "CreateHcxActivationKeyRequest", + "CreateLoggingServerRequest", + "CreateManagementDnsZoneBindingRequest", + "CreateNetworkPeeringRequest", "CreateNetworkPolicyRequest", 
"CreatePrivateCloudRequest", "CreatePrivateConnectionRequest", "CreateVmwareEngineNetworkRequest", "DeleteClusterRequest", + "DeleteExternalAccessRuleRequest", + "DeleteExternalAddressRequest", + "DeleteLoggingServerRequest", + "DeleteManagementDnsZoneBindingRequest", + "DeleteNetworkPeeringRequest", "DeleteNetworkPolicyRequest", "DeletePrivateCloudRequest", "DeletePrivateConnectionRequest", "DeleteVmwareEngineNetworkRequest", + "FetchNetworkPolicyExternalAddressesRequest", + "FetchNetworkPolicyExternalAddressesResponse", "GetClusterRequest", + "GetDnsBindPermissionRequest", + "GetDnsForwardingRequest", + "GetExternalAccessRuleRequest", + "GetExternalAddressRequest", "GetHcxActivationKeyRequest", + "GetLoggingServerRequest", + "GetManagementDnsZoneBindingRequest", + "GetNetworkPeeringRequest", "GetNetworkPolicyRequest", + "GetNodeRequest", "GetNodeTypeRequest", "GetPrivateCloudRequest", "GetPrivateConnectionRequest", "GetSubnetRequest", "GetVmwareEngineNetworkRequest", + "GrantDnsBindPermissionRequest", "ListClustersRequest", "ListClustersResponse", + "ListExternalAccessRulesRequest", + "ListExternalAccessRulesResponse", + "ListExternalAddressesRequest", + "ListExternalAddressesResponse", "ListHcxActivationKeysRequest", "ListHcxActivationKeysResponse", + "ListLoggingServersRequest", + "ListLoggingServersResponse", + "ListManagementDnsZoneBindingsRequest", + "ListManagementDnsZoneBindingsResponse", + "ListNetworkPeeringsRequest", + "ListNetworkPeeringsResponse", "ListNetworkPoliciesRequest", "ListNetworkPoliciesResponse", + "ListNodesRequest", + "ListNodesResponse", "ListNodeTypesRequest", "ListNodeTypesResponse", + "ListPeeringRoutesRequest", + "ListPeeringRoutesResponse", "ListPrivateCloudsRequest", "ListPrivateCloudsResponse", "ListPrivateConnectionPeeringRoutesRequest", @@ -121,12 +210,20 @@ "ListVmwareEngineNetworksRequest", "ListVmwareEngineNetworksResponse", "OperationMetadata", + "RepairManagementDnsZoneBindingRequest", "ResetNsxCredentialsRequest", 
"ResetVcenterCredentialsRequest", + "RevokeDnsBindPermissionRequest", "ShowNsxCredentialsRequest", "ShowVcenterCredentialsRequest", "UndeletePrivateCloudRequest", "UpdateClusterRequest", + "UpdateDnsForwardingRequest", + "UpdateExternalAccessRuleRequest", + "UpdateExternalAddressRequest", + "UpdateLoggingServerRequest", + "UpdateManagementDnsZoneBindingRequest", + "UpdateNetworkPeeringRequest", "UpdateNetworkPolicyRequest", "UpdatePrivateCloudRequest", "UpdatePrivateConnectionRequest", @@ -134,16 +231,27 @@ "UpdateVmwareEngineNetworkRequest", "Cluster", "Credentials", + "DnsBindPermission", + "DnsForwarding", + "ExternalAccessRule", + "ExternalAddress", "Hcx", "HcxActivationKey", + "LocationMetadata", + "LoggingServer", + "ManagementDnsZoneBinding", "NetworkConfig", + "NetworkPeering", "NetworkPolicy", + "Node", "NodeType", "NodeTypeConfig", "Nsx", "PeeringRoute", + "Principal", "PrivateCloud", "PrivateConnection", + "StretchedClusterConfig", "Subnet", "Vcenter", "VmwareEngineNetwork", diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py index 6d2166e4e9bb..f5c18fe89da6 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine.py @@ -39,10 +39,33 @@ "CreateClusterRequest", "UpdateClusterRequest", "DeleteClusterRequest", + "ListNodesRequest", + "ListNodesResponse", + "GetNodeRequest", + "ListExternalAddressesRequest", + "ListExternalAddressesResponse", + "FetchNetworkPolicyExternalAddressesRequest", + "FetchNetworkPolicyExternalAddressesResponse", + "GetExternalAddressRequest", + "CreateExternalAddressRequest", + "UpdateExternalAddressRequest", + "DeleteExternalAddressRequest", "ListSubnetsRequest", "ListSubnetsResponse", "GetSubnetRequest", "UpdateSubnetRequest", + "ListExternalAccessRulesRequest", + 
"ListExternalAccessRulesResponse", + "GetExternalAccessRuleRequest", + "CreateExternalAccessRuleRequest", + "UpdateExternalAccessRuleRequest", + "DeleteExternalAccessRuleRequest", + "ListLoggingServersRequest", + "ListLoggingServersResponse", + "GetLoggingServerRequest", + "CreateLoggingServerRequest", + "UpdateLoggingServerRequest", + "DeleteLoggingServerRequest", "OperationMetadata", "ListNodeTypesRequest", "ListNodeTypesResponse", @@ -55,12 +78,29 @@ "ListHcxActivationKeysRequest", "GetHcxActivationKeyRequest", "CreateHcxActivationKeyRequest", + "GetDnsForwardingRequest", + "UpdateDnsForwardingRequest", + "CreateNetworkPeeringRequest", + "DeleteNetworkPeeringRequest", + "GetNetworkPeeringRequest", + "ListNetworkPeeringsRequest", + "UpdateNetworkPeeringRequest", + "ListNetworkPeeringsResponse", + "ListPeeringRoutesRequest", + "ListPeeringRoutesResponse", "ListNetworkPoliciesRequest", "ListNetworkPoliciesResponse", "GetNetworkPolicyRequest", "UpdateNetworkPolicyRequest", "CreateNetworkPolicyRequest", "DeleteNetworkPolicyRequest", + "ListManagementDnsZoneBindingsRequest", + "ListManagementDnsZoneBindingsResponse", + "GetManagementDnsZoneBindingRequest", + "CreateManagementDnsZoneBindingRequest", + "UpdateManagementDnsZoneBindingRequest", + "DeleteManagementDnsZoneBindingRequest", + "RepairManagementDnsZoneBindingRequest", "CreateVmwareEngineNetworkRequest", "UpdateVmwareEngineNetworkRequest", "DeleteVmwareEngineNetworkRequest", @@ -75,6 +115,9 @@ "DeletePrivateConnectionRequest", "ListPrivateConnectionPeeringRoutesRequest", "ListPrivateConnectionPeeringRoutesResponse", + "GrantDnsBindPermissionRequest", + "RevokeDnsBindPermissionRequest", + "GetDnsBindPermissionRequest", }, ) @@ -674,31 +717,30 @@ class DeleteClusterRequest(proto.Message): ) -class ListSubnetsRequest(proto.Message): +class ListNodesRequest(proto.Message): r"""Request message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + 
[VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] Attributes: parent (str): - Required. The resource name of the private cloud to be - queried for subnets. Resource names are schemeless URIs that - follow the conventions in + Required. The resource name of the cluster to be queried for + nodes. Resource names are schemeless URIs that follow the + conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/clusters/my-cluster`` page_size (int): - The maximum number of subnets to return in - one page. The service may return fewer than this + The maximum number of nodes to return in one + page. The service may return fewer than this value. The maximum value is coerced to 1000. The default value of this field is 500. page_token (str): - A page token, received from a previous - ``ListSubnetsRequest`` call. Provide this to retrieve the - subsequent page. + A page token, received from a previous ``ListNodes`` call. + Provide this to retrieve the subsequent page. When paginating, all other parameters provided to - ``ListSubnetsRequest`` must match the call that provided the - page token. + ``ListNodes`` must match the call that provided the page + token. """ parent: str = proto.Field( @@ -715,53 +757,43 @@ class ListSubnetsRequest(proto.Message): ) -class ListSubnetsResponse(proto.Message): +class ListNodesResponse(proto.Message): r"""Response message for - [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + [VmwareEngine.ListNodes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodes] Attributes: - subnets (MutableSequence[google.cloud.vmwareengine_v1.types.Subnet]): - A list of subnets. + nodes (MutableSequence[google.cloud.vmwareengine_v1.types.Node]): + The nodes. 
next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. - unreachable (MutableSequence[str]): - Locations that could not be reached when - making an aggregated query using wildcards. """ @property def raw_page(self): return self - subnets: MutableSequence[vmwareengine_resources.Subnet] = proto.RepeatedField( + nodes: MutableSequence[vmwareengine_resources.Node] = proto.RepeatedField( proto.MESSAGE, number=1, - message=vmwareengine_resources.Subnet, + message=vmwareengine_resources.Node, ) next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) -class GetSubnetRequest(proto.Message): +class GetNodeRequest(proto.Message): r"""Request message for - [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] + [VmwareEngine.GetNode][google.cloud.vmwareengine.v1.VmwareEngine.GetNode] Attributes: name (str): - Required. The resource name of the subnet to retrieve. - Resource names are schemeless URIs that follow the - conventions in - https://cloud.google.com/apis/design/resource_names. For + Required. The resource name of the node to retrieve. For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` + ``projects/{project}/locations/{location}/privateClouds/{private_cloud}/clusters/{cluster}/nodes/{node}`` """ name: str = proto.Field( @@ -770,119 +802,32 @@ class GetSubnetRequest(proto.Message): ) -class UpdateSubnetRequest(proto.Message): - r"""Request message for - [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet] - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the ``Subnet`` resource by the update. 
The - fields specified in the ``update_mask`` are relative to the - resource, not the full request. A field will be overwritten - if it is in the mask. If the user does not provide a mask - then all fields will be overwritten. - subnet (google.cloud.vmwareengine_v1.types.Subnet): - Required. Subnet description. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - subnet: vmwareengine_resources.Subnet = proto.Field( - proto.MESSAGE, - number=2, - message=vmwareengine_resources.Subnet, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. True if the user has requested cancellation of - the operation; false otherwise. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ListNodeTypesRequest(proto.Message): +class ListExternalAddressesRequest(proto.Message): r"""Request message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] Attributes: parent (str): - Required. The resource name of the location to be queried - for node types. Resource names are schemeless URIs that - follow the conventions in + Required. The resource name of the private cloud to be + queried for external IP addresses. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1-a`` + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` page_size (int): - The maximum number of node types to return in - one page. The service may return fewer than this - value. The maximum value is coerced to 1000. + The maximum number of external IP addresses + to return in one page. The service may return + fewer than this value. The maximum value is + coerced to 1000. The default value of this field is 500. page_token (str): - A page token, received from a previous ``ListNodeTypes`` - call. Provide this to retrieve the subsequent page. + A page token, received from a previous + ``ListExternalAddresses`` call. 
Provide this to retrieve the + subsequent page. When paginating, all other parameters provided to - ``ListNodeTypes`` must match the call that provided the page - token. + ``ListExternalAddresses`` must match the call that provided + the page token. filter (str): A filter expression that matches resources returned in the response. The expression must specify the field name, a @@ -891,17 +836,17 @@ class ListNodeTypesRequest(proto.Message): boolean. The comparison operator must be ``=``, ``!=``, ``>``, or ``<``. - For example, if you are filtering a list of node types, you - can exclude the ones named ``standard-72`` by specifying - ``name != "standard-72"``. + For example, if you are filtering a list of IP addresses, + you can exclude the ones named ``example-ip`` by specifying + ``name != "example-ip"``. To filter on multiple expressions, provide each separate expression within parentheses. For example: :: - (name = "standard-72") - (virtual_cpu_count > 2) + (name = "example-ip") + (createTime > "2021-04-12T08:15:10.40Z") By default, each expression is an ``AND`` expression. However, you can include ``AND`` and ``OR`` expressions @@ -909,9 +854,15 @@ class ListNodeTypesRequest(proto.Message): :: - (name = "standard-96") AND - (virtual_cpu_count > 2) OR - (name = "standard-72") + (name = "example-ip-1") AND + (createTime > "2021-04-12T08:15:10.40Z") OR + (name = "example-ip-2") + order_by (str): + Sorts list results by a certain order. By default, returned + results are ordered by ``name`` in ascending order. You can + also sort results in descending order based on the ``name`` + value using ``orderBy="name desc"``. Currently, only + ordering by ``name`` is supported. 
""" parent: str = proto.Field( @@ -930,15 +881,19 @@ class ListNodeTypesRequest(proto.Message): proto.STRING, number=4, ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) -class ListNodeTypesResponse(proto.Message): +class ListExternalAddressesResponse(proto.Message): r"""Response message for - [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + [VmwareEngine.ListExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses] Attributes: - node_types (MutableSequence[google.cloud.vmwareengine_v1.types.NodeType]): - A list of Node Types. + external_addresses (MutableSequence[google.cloud.vmwareengine_v1.types.ExternalAddress]): + A list of external IP addresses. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent @@ -952,10 +907,12 @@ class ListNodeTypesResponse(proto.Message): def raw_page(self): return self - node_types: MutableSequence[vmwareengine_resources.NodeType] = proto.RepeatedField( + external_addresses: MutableSequence[ + vmwareengine_resources.ExternalAddress + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message=vmwareengine_resources.NodeType, + message=vmwareengine_resources.ExternalAddress, ) next_page_token: str = proto.Field( proto.STRING, @@ -967,78 +924,130 @@ def raw_page(self): ) -class GetNodeTypeRequest(proto.Message): +class FetchNetworkPolicyExternalAddressesRequest(proto.Message): r"""Request message for - [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] Attributes: - name (str): - Required. The resource name of the node type to retrieve. - Resource names are schemeless URIs that follow the - conventions in + network_policy (str): + Required. 
The resource name of the network policy to query + for assigned external IP addresses. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` + page_size (int): + The maximum number of external IP addresses + to return in one page. The service may return + fewer than this value. The maximum value is + coerced to 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous + ``FetchNetworkPolicyExternalAddresses`` call. Provide this + to retrieve the subsequent page. + + When paginating, all parameters provided to + ``FetchNetworkPolicyExternalAddresses``, except for + ``page_size`` and ``page_token``, must match the call that + provided the page token. """ - name: str = proto.Field( + network_policy: str = proto.Field( proto.STRING, number=1, ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) -class ShowNsxCredentialsRequest(proto.Message): - r"""Request message for - [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] +class FetchNetworkPolicyExternalAddressesResponse(proto.Message): + r"""Response message for + [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses] Attributes: - private_cloud (str): - Required. The resource name of the private cloud to be - queried for credentials. Resource names are schemeless URIs - that follow the conventions in - https://cloud.google.com/apis/design/resource_names. 
For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + external_addresses (MutableSequence[google.cloud.vmwareengine_v1.types.ExternalAddress]): + A list of external IP addresses assigned to + VMware workload VMs within the scope of the + given network policy. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. """ - private_cloud: str = proto.Field( - proto.STRING, + @property + def raw_page(self): + return self + + external_addresses: MutableSequence[ + vmwareengine_resources.ExternalAddress + ] = proto.RepeatedField( + proto.MESSAGE, number=1, + message=vmwareengine_resources.ExternalAddress, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, ) -class ShowVcenterCredentialsRequest(proto.Message): +class GetExternalAddressRequest(proto.Message): r"""Request message for - [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] + [VmwareEngine.GetExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAddress] Attributes: - private_cloud (str): - Required. The resource name of the private cloud to be - queried for credentials. Resource names are schemeless URIs - that follow the conventions in + name (str): + Required. The resource name of the external IP address to + retrieve. Resource names are schemeless URIs that follow the + conventions in https://cloud.google.com/apis/design/resource_names. 
For example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-ip`` """ - private_cloud: str = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) -class ResetNsxCredentialsRequest(proto.Message): +class CreateExternalAddressRequest(proto.Message): r"""Request message for - [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] + [VmwareEngine.CreateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAddress] Attributes: - private_cloud (str): - Required. The resource name of the private cloud to reset - credentials for. Resource names are schemeless URIs that - follow the conventions in + parent (str): + Required. The resource name of the private cloud to create a + new external IP address in. Resource names are schemeless + URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + external_address (google.cloud.vmwareengine_v1.types.ExternalAddress): + Required. The initial description of a new + external IP address. + external_address_id (str): + Required. The user-provided identifier of the + ``ExternalAddress`` to be created. This identifier must be + unique among ``ExternalAddress`` resources within the parent + and becomes the final token in the name URI. The identifier + must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) request_id (str): Optional. A request ID to identify requests. 
Specify a unique request ID so that if you must @@ -1051,9 +1060,9 @@ class ResetNsxCredentialsRequest(proto.Message): For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1062,28 +1071,39 @@ class ResetNsxCredentialsRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - private_cloud: str = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) + external_address: vmwareengine_resources.ExternalAddress = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.ExternalAddress, + ) + external_address_id: str = proto.Field( + proto.STRING, + number=3, + ) request_id: str = proto.Field( proto.STRING, - number=2, + number=4, ) -class ResetVcenterCredentialsRequest(proto.Message): +class UpdateExternalAddressRequest(proto.Message): r"""Request message for - [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] + [VmwareEngine.UpdateExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAddress] Attributes: - private_cloud (str): - Required. The resource name of the private cloud to reset - credentials for. Resource names are schemeless URIs that - follow the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``ExternalAddress`` resource by the + update. 
The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field will + be overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + external_address (google.cloud.vmwareengine_v1.types.ExternalAddress): + Required. External IP address description. request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must @@ -1096,9 +1116,9 @@ class ResetVcenterCredentialsRequest(proto.Message): For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1107,79 +1127,92 @@ class ResetVcenterCredentialsRequest(proto.Message): (00000000-0000-0000-0000-000000000000). 
""" - private_cloud: str = proto.Field( - proto.STRING, + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, number=1, + message=field_mask_pb2.FieldMask, + ) + external_address: vmwareengine_resources.ExternalAddress = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.ExternalAddress, ) request_id: str = proto.Field( proto.STRING, - number=2, + number=3, ) -class ListHcxActivationKeysResponse(proto.Message): - r"""Response message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] +class DeleteExternalAddressRequest(proto.Message): + r"""Request message for + [VmwareEngine.DeleteExternalAddress][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAddress] Attributes: - hcx_activation_keys (MutableSequence[google.cloud.vmwareengine_v1.types.HcxActivationKey]): - List of HCX activation keys. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - unreachable (MutableSequence[str]): - Locations that could not be reached when - making an aggregated query using wildcards. - """ + name (str): + Required. The resource name of the external IP address to + delete. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-ip`` + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. 
- @property - def raw_page(self): - return self + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. - hcx_activation_keys: MutableSequence[ - vmwareengine_resources.HcxActivationKey - ] = proto.RepeatedField( - proto.MESSAGE, + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, number=1, - message=vmwareengine_resources.HcxActivationKey, ) - next_page_token: str = proto.Field( + request_id: str = proto.Field( proto.STRING, number=2, ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) -class ListHcxActivationKeysRequest(proto.Message): +class ListSubnetsRequest(proto.Message): r"""Request message for - [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] Attributes: parent (str): Required. The resource name of the private cloud to be - queried for HCX activation keys. Resource names are - schemeless URIs that follow the conventions in + queried for subnets. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` page_size (int): - The maximum number of HCX activation keys to - return in one page. The service may return fewer - than this value. The maximum value is coerced to - 1000. + The maximum number of subnets to return in + one page. 
The service may return fewer than this + value. The maximum value is coerced to 1000. The default value of this field is 500. page_token (str): A page token, received from a previous - ``ListHcxActivationKeys`` call. Provide this to retrieve the + ``ListSubnetsRequest`` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to - ``ListHcxActivationKeys`` must match the call that provided - the page token. + ``ListSubnetsRequest`` must match the call that provided the + page token. """ parent: str = proto.Field( @@ -1196,17 +1229,53 @@ class ListHcxActivationKeysRequest(proto.Message): ) -class GetHcxActivationKeyRequest(proto.Message): - r"""Request message for [VmwareEngine.GetHcxActivationKeys][] +class ListSubnetsResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListSubnets][google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets] + + Attributes: + subnets (MutableSequence[google.cloud.vmwareengine_v1.types.Subnet]): + A list of subnets. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached when + making an aggregated query using wildcards. + """ + + @property + def raw_page(self): + return self + + subnets: MutableSequence[vmwareengine_resources.Subnet] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.Subnet, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetSubnetRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetSubnet][google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet] Attributes: name (str): - Required. The resource name of the HCX activation key to - retrieve. Resource names are schemeless URIs that follow the + Required. 
The resource name of the subnet to retrieve. + Resource names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/subnets/my-subnet`` """ name: str = proto.Field( @@ -1215,103 +1284,1976 @@ class GetHcxActivationKeyRequest(proto.Message): ) -class CreateHcxActivationKeyRequest(proto.Message): +class UpdateSubnetRequest(proto.Message): r"""Request message for - [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet] Attributes: - parent (str): - Required. The resource name of the private cloud to create - the key for. Resource names are schemeless URIs that follow - the conventions in - https://cloud.google.com/apis/design/resource_names. For - example: - ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` - hcx_activation_key (google.cloud.vmwareengine_v1.types.HcxActivationKey): - Required. The initial description of a new - HCX activation key. When creating a new key, - this field must be an empty object. - hcx_activation_key_id (str): - Required. The user-provided identifier of the - ``HcxActivationKey`` to be created. This identifier must be - unique among ``HcxActivationKey`` resources within the - parent and becomes the final token in the name URI. The - identifier must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) - request_id (str): - A request ID to identify requests. 
Specify a - unique request ID so that if you must retry your - request, the server will know to ignore the - request if it has already been completed. The - server guarantees that a request doesn't result - in creation of duplicate commitments for at - least 60 minutes. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``Subnet`` resource by the update. The + fields specified in the ``update_mask`` are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + subnet (google.cloud.vmwareengine_v1.types.Subnet): + Required. Subnet description. 
""" - parent: str = proto.Field( - proto.STRING, + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, number=1, + message=field_mask_pb2.FieldMask, ) - hcx_activation_key: vmwareengine_resources.HcxActivationKey = proto.Field( + subnet: vmwareengine_resources.Subnet = proto.Field( proto.MESSAGE, number=2, - message=vmwareengine_resources.HcxActivationKey, - ) - hcx_activation_key_id: str = proto.Field( - proto.STRING, - number=3, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, + message=vmwareengine_resources.Subnet, ) -class ListNetworkPoliciesRequest(proto.Message): +class ListExternalAccessRulesRequest(proto.Message): r"""Request message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] Attributes: parent (str): - Required. The resource name of the location (region) to - query for network policies. Resource names are schemeless - URIs that follow the conventions in + Required. The resource name of the network policy to query + for external access firewall rules. Resource names are + schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` page_size (int): - The maximum number of network policies to - return in one page. The service may return fewer - than this value. The maximum value is coerced to - 1000. + The maximum number of external access rules + to return in one page. The service may return + fewer than this value. The maximum value is + coerced to 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous + ``ListExternalAccessRulesRequest`` call. Provide this to + retrieve the subsequent page. 
+ + When paginating, all other parameters provided to + ``ListExternalAccessRulesRequest`` must match the call that + provided the page token. + filter (str): + A filter expression that matches resources returned in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be ``=``, ``!=``, + ``>``, or ``<``. + + For example, if you are filtering a list of external access + rules, you can exclude the ones named ``example-rule`` by + specifying ``name != "example-rule"``. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (name = "example-rule") + (createTime > "2021-04-12T08:15:10.40Z") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (name = "example-rule-1") AND + (createTime > "2021-04-12T08:15:10.40Z") OR + (name = "example-rule-2") + order_by (str): + Sorts list results by a certain order. By default, returned + results are ordered by ``name`` in ascending order. You can + also sort results in descending order based on the ``name`` + value using ``orderBy="name desc"``. Currently, only + ordering by ``name`` is supported. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListExternalAccessRulesResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListExternalAccessRules][google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules] + + Attributes: + external_access_rules (MutableSequence[google.cloud.vmwareengine_v1.types.ExternalAccessRule]): + A list of external access firewall rules. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached when + making an aggregated query using wildcards. + """ + + @property + def raw_page(self): + return self + + external_access_rules: MutableSequence[ + vmwareengine_resources.ExternalAccessRule + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.ExternalAccessRule, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetExternalAccessRuleRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAccessRule] + + Attributes: + name (str): + Required. The resource name of the external access firewall + rule to retrieve. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateExternalAccessRuleRequest(proto.Message): + r"""Request message for + [VmwareEngine.CreateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAccessRule] + + Attributes: + parent (str): + Required. The resource name of the network policy to create + a new external access firewall rule in. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy`` + external_access_rule (google.cloud.vmwareengine_v1.types.ExternalAccessRule): + Required. The initial description of a new + external access rule. + external_access_rule_id (str): + Required. The user-provided identifier of the + ``ExternalAccessRule`` to be created. This identifier must + be unique among ``ExternalAccessRule`` resources within the + parent and becomes the final token in the name URI. The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + request_id (str): + A request ID to identify requests. Specify a + unique request ID so that if you must retry your + request, the server will know to ignore the + request if it has already been completed. The + server guarantees that a request doesn't result + in creation of duplicate commitments for at + least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + external_access_rule: vmwareengine_resources.ExternalAccessRule = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.ExternalAccessRule, + ) + external_access_rule_id: str = proto.Field( + proto.STRING, + number=3, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateExternalAccessRuleRequest(proto.Message): + r"""Request message for + [VmwareEngine.UpdateExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAccessRule] + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``ExternalAccessRule`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field will + be overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + external_access_rule (google.cloud.vmwareengine_v1.types.ExternalAccessRule): + Required. Description of the external access + rule. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + external_access_rule: vmwareengine_resources.ExternalAccessRule = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.ExternalAccessRule, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteExternalAccessRuleRequest(proto.Message): + r"""Request message for + [VmwareEngine.DeleteExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule] + + Attributes: + name (str): + Required. The resource name of the external access firewall + rule to delete. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. 
This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListLoggingServersRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] + + Attributes: + parent (str): + Required. The resource name of the private cloud to be + queried for logging servers. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + page_size (int): + The maximum number of logging servers to + return in one page. The service may return fewer + than this value. The maximum value is coerced to + 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous + ``ListLoggingServersRequest`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListLoggingServersRequest`` must match the call that + provided the page token. + filter (str): + A filter expression that matches resources returned in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be ``=``, ``!=``, + ``>``, or ``<``. + + For example, if you are filtering a list of logging servers, + you can exclude the ones named ``example-server`` by + specifying ``name != "example-server"``. + + To filter on multiple expressions, provide each separate + expression within parentheses. 
For example: + + :: + + (name = "example-server") + (createTime > "2021-04-12T08:15:10.40Z") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (name = "example-server-1") AND + (createTime > "2021-04-12T08:15:10.40Z") OR + (name = "example-server-2") + order_by (str): + Sorts list results by a certain order. By default, returned + results are ordered by ``name`` in ascending order. You can + also sort results in descending order based on the ``name`` + value using ``orderBy="name desc"``. Currently, only + ordering by ``name`` is supported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLoggingServersResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListLoggingServers][google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers] + + Attributes: + logging_servers (MutableSequence[google.cloud.vmwareengine_v1.types.LoggingServer]): + A list of Logging Servers. + next_page_token (str): + A token, which can be send as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached when + making an aggregated query using wildcards. 
+ """ + + @property + def raw_page(self): + return self + + logging_servers: MutableSequence[ + vmwareengine_resources.LoggingServer + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.LoggingServer, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetLoggingServerRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.GetLoggingServer] + + Attributes: + name (str): + Required. The resource name of the Logging Server to + retrieve. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateLoggingServerRequest(proto.Message): + r"""Request message for + [VmwareEngine.CreateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.CreateLoggingServer] + + Attributes: + parent (str): + Required. The resource name of the private cloud to create a + new Logging Server in. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + logging_server (google.cloud.vmwareengine_v1.types.LoggingServer): + Required. The initial description of a new + logging server. + logging_server_id (str): + Required. The user-provided identifier of the + ``LoggingServer`` to be created. This identifier must be + unique among ``LoggingServer`` resources within the parent + and becomes the final token in the name URI. 
The identifier + must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + logging_server: vmwareengine_resources.LoggingServer = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.LoggingServer, + ) + logging_server_id: str = proto.Field( + proto.STRING, + number=3, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateLoggingServerRequest(proto.Message): + r"""Request message for + [VmwareEngine.UpdateLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.UpdateLoggingServer] + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``LoggingServer`` resource by the update. + The fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. 
If the user does not + provide a mask then all fields will be overwritten. + logging_server (google.cloud.vmwareengine_v1.types.LoggingServer): + Required. Logging server description. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + logging_server: vmwareengine_resources.LoggingServer = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.LoggingServer, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteLoggingServerRequest(proto.Message): + r"""Request message for + [VmwareEngine.DeleteLoggingServer][google.cloud.vmwareengine.v1.VmwareEngine.DeleteLoggingServer] + + Attributes: + name (str): + Required. The resource name of the logging server to delete. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` + request_id (str): + Optional. A request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. True if the user has requested cancellation of + the operation; false otherwise. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListNodeTypesRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + + Attributes: + parent (str): + Required. The resource name of the location to be queried + for node types. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1-a`` + page_size (int): + The maximum number of node types to return in + one page. The service may return fewer than this + value. The maximum value is coerced to 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous ``ListNodeTypes`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListNodeTypes`` must match the call that provided the page + token. + filter (str): + A filter expression that matches resources returned in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be ``=``, ``!=``, + ``>``, or ``<``. + + For example, if you are filtering a list of node types, you + can exclude the ones named ``standard-72`` by specifying + ``name != "standard-72"``. 
+ + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (name = "standard-72") + (virtual_cpu_count > 2) + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (name = "standard-96") AND + (virtual_cpu_count > 2) OR + (name = "standard-72") + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListNodeTypesResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListNodeTypes][google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes] + + Attributes: + node_types (MutableSequence[google.cloud.vmwareengine_v1.types.NodeType]): + A list of Node Types. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached when + making an aggregated query using wildcards. + """ + + @property + def raw_page(self): + return self + + node_types: MutableSequence[vmwareengine_resources.NodeType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.NodeType, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetNodeTypeRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetNodeType][google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType] + + Attributes: + name (str): + Required. The resource name of the node type to retrieve. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-proj/locations/us-central1-a/nodeTypes/standard-72`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ShowNsxCredentialsRequest(proto.Message): + r"""Request message for + [VmwareEngine.ShowNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials] + + Attributes: + private_cloud (str): + Required. The resource name of the private cloud to be + queried for credentials. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + """ + + private_cloud: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ShowVcenterCredentialsRequest(proto.Message): + r"""Request message for + [VmwareEngine.ShowVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials] + + Attributes: + private_cloud (str): + Required. The resource name of the private cloud to be + queried for credentials. Resource names are schemeless URIs + that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + username (str): + Optional. The username of the user to be + queried for credentials. The default value of + this field is CloudOwner@gve.local. The provided + value must be one of the following: + + CloudOwner@gve.local, + solution-user-01@gve.local, + solution-user-02@gve.local, + solution-user-03@gve.local, + solution-user-04@gve.local, + solution-user-05@gve.local, + zertoadmin@gve.local. 
+ """ + + private_cloud: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ResetNsxCredentialsRequest(proto.Message): + r"""Request message for + [VmwareEngine.ResetNsxCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials] + + Attributes: + private_cloud (str): + Required. The resource name of the private cloud to reset + credentials for. Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + private_cloud: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ResetVcenterCredentialsRequest(proto.Message): + r"""Request message for + [VmwareEngine.ResetVcenterCredentials][google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials] + + Attributes: + private_cloud (str): + Required. The resource name of the private cloud to reset + credentials for. 
Resource names are schemeless URIs that + follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + username (str): + Optional. The username of the user to be to + reset the credentials. The default value of this + field is CloudOwner@gve.local. The provided + value should be one of the following: + + solution-user-01@gve.local, + solution-user-02@gve.local, + solution-user-03@gve.local, + solution-user-04@gve.local, + solution-user-05@gve.local, + zertoadmin@gve.local. + """ + + private_cloud: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListHcxActivationKeysResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + + Attributes: + hcx_activation_keys (MutableSequence[google.cloud.vmwareengine_v1.types.HcxActivationKey]): + List of HCX activation keys. 
+ next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached when + making an aggregated query using wildcards. + """ + + @property + def raw_page(self): + return self + + hcx_activation_keys: MutableSequence[ + vmwareengine_resources.HcxActivationKey + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.HcxActivationKey, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListHcxActivationKeysRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListHcxActivationKeys][google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys] + + Attributes: + parent (str): + Required. The resource name of the private cloud to be + queried for HCX activation keys. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + page_size (int): + The maximum number of HCX activation keys to + return in one page. The service may return fewer + than this value. The maximum value is coerced to + 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous + ``ListHcxActivationKeys`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListHcxActivationKeys`` must match the call that provided + the page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetHcxActivationKeyRequest(proto.Message): + r"""Request message for [VmwareEngine.GetHcxActivationKeys][] + + Attributes: + name (str): + Required. The resource name of the HCX activation key to + retrieve. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud/hcxActivationKeys/my-key`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateHcxActivationKeyRequest(proto.Message): + r"""Request message for + [VmwareEngine.CreateHcxActivationKey][google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey] + + Attributes: + parent (str): + Required. The resource name of the private cloud to create + the key for. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/privateClouds/my-cloud`` + hcx_activation_key (google.cloud.vmwareengine_v1.types.HcxActivationKey): + Required. The initial description of a new + HCX activation key. When creating a new key, + this field must be an empty object. + hcx_activation_key_id (str): + Required. The user-provided identifier of the + ``HcxActivationKey`` to be created. This identifier must be + unique among ``HcxActivationKey`` resources within the + parent and becomes the final token in the name URI. 
The + identifier must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + request_id (str): + A request ID to identify requests. Specify a + unique request ID so that if you must retry your + request, the server will know to ignore the + request if it has already been completed. The + server guarantees that a request doesn't result + in creation of duplicate commitments for at + least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + hcx_activation_key: vmwareengine_resources.HcxActivationKey = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.HcxActivationKey, + ) + hcx_activation_key_id: str = proto.Field( + proto.STRING, + number=3, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GetDnsForwardingRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsForwarding] + + Attributes: + name (str): + Required. The resource name of a ``DnsForwarding`` to + retrieve. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/dnsForwarding`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDnsForwardingRequest(proto.Message): + r"""Request message for + [VmwareEngine.UpdateDnsForwarding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateDnsForwarding] + + Attributes: + dns_forwarding (google.cloud.vmwareengine_v1.types.DnsForwarding): + Required. DnsForwarding config details. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``DnsForwarding`` resource by the update. + The fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + dns_forwarding: vmwareengine_resources.DnsForwarding = proto.Field( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.DnsForwarding, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateNetworkPeeringRequest(proto.Message): + r"""Request message for + [VmwareEngine.CreateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPeering] + + Attributes: + parent (str): + Required. The resource name of the location to create the + new network peering in. This value is always ``global``, + because ``NetworkPeering`` is a global resource. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + network_peering_id (str): + Required. The user-provided identifier of the new + ``NetworkPeering``. This identifier must be unique among + ``NetworkPeering`` resources within the parent and becomes + the final token in the name URI. The identifier must meet + the following requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + network_peering (google.cloud.vmwareengine_v1.types.NetworkPeering): + Required. The initial description of the new + network peering. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + network_peering_id: str = proto.Field( + proto.STRING, + number=2, + ) + network_peering: vmwareengine_resources.NetworkPeering = proto.Field( + proto.MESSAGE, + number=3, + message=vmwareengine_resources.NetworkPeering, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteNetworkPeeringRequest(proto.Message): + r"""Request message for + [VmwareEngine.DeleteNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPeering] + + Attributes: + name (str): + Required. The resource name of the network peering to be + deleted. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetNetworkPeeringRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPeering] + + Attributes: + name (str): + Required. The resource name of the network peering to + retrieve. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListNetworkPeeringsRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + + Attributes: + parent (str): + Required. The resource name of the location (global) to + query for network peerings. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/global`` + page_size (int): + The maximum number of network peerings to + return in one page. The maximum value is coerced + to 1000. The default value of this field is 500. + page_token (str): + A page token, received from a previous + ``ListNetworkPeerings`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListNetworkPeerings`` must match the call that provided + the page token. + filter (str): + A filter expression that matches resources returned in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. 
The comparison operator must be ``=``, ``!=``, + ``>``, or ``<``. + + For example, if you are filtering a list of network + peerings, you can exclude the ones named ``example-peering`` + by specifying ``name != "example-peering"``. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (name = "example-peering") + (createTime > "2021-04-12T08:15:10.40Z") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (name = "example-peering-1") AND + (createTime > "2021-04-12T08:15:10.40Z") OR + (name = "example-peering-2") + order_by (str): + Sorts list results by a certain order. By default, returned + results are ordered by ``name`` in ascending order. You can + also sort results in descending order based on the ``name`` + value using ``orderBy="name desc"``. Currently, only + ordering by ``name`` is supported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpdateNetworkPeeringRequest(proto.Message): + r"""Request message for + [VmwareEngine.UpdateNetworkPeering][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPeering] + + Attributes: + network_peering (google.cloud.vmwareengine_v1.types.NetworkPeering): + Required. Network peering description. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``NetworkPeering`` resource by the + update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field will + be overwritten if it is in the mask. 
If the user does not + provide a mask then all fields will be overwritten. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + network_peering: vmwareengine_resources.NetworkPeering = proto.Field( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.NetworkPeering, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListNetworkPeeringsResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListNetworkPeerings][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings] + + Attributes: + network_peerings (MutableSequence[google.cloud.vmwareengine_v1.types.NetworkPeering]): + A list of network peerings. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Unreachable resources. 
+ """ + + @property + def raw_page(self): + return self + + network_peerings: MutableSequence[ + vmwareengine_resources.NetworkPeering + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.NetworkPeering, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListPeeringRoutesRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + + Attributes: + parent (str): + Required. The resource name of the network peering to + retrieve peering routes from. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + page_size (int): + The maximum number of peering routes to + return in one page. The service may return fewer + than this value. The maximum value is coerced to + 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous ``ListPeeringRoutes`` + call. Provide this to retrieve the subsequent page. When + paginating, all other parameters provided to + ``ListPeeringRoutes`` must match the call that provided the + page token. + filter (str): + A filter expression that matches resources returned in the + response. Currently, only filtering on the ``direction`` + field is supported. To return routes imported from the peer + network, provide "direction=INCOMING". To return routes + exported from the VMware Engine network, provide + "direction=OUTGOING". Other filter expressions return an + error. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ListPeeringRoutesResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListPeeringRoutes][google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes] + + Attributes: + peering_routes (MutableSequence[google.cloud.vmwareengine_v1.types.PeeringRoute]): + A list of peering routes. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + peering_routes: MutableSequence[ + vmwareengine_resources.PeeringRoute + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.PeeringRoute, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListNetworkPoliciesRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + + Attributes: + parent (str): + Required. The resource name of the location (region) to + query for network policies. Resource names are schemeless + URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + page_size (int): + The maximum number of network policies to + return in one page. The service may return fewer + than this value. The maximum value is coerced to + 1000. + The default value of this field is 500. + page_token (str): + A page token, received from a previous + ``ListNetworkPolicies`` call. Provide this to retrieve the + subsequent page. 
+ + When paginating, all other parameters provided to + ``ListNetworkPolicies`` must match the call that provided + the page token. + filter (str): + A filter expression that matches resources returned in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be ``=``, ``!=``, + ``>``, or ``<``. + + For example, if you are filtering a list of network + policies, you can exclude the ones named ``example-policy`` + by specifying ``name != "example-policy"``. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (name = "example-policy") + (createTime > "2021-04-12T08:15:10.40Z") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (name = "example-policy-1") AND + (createTime > "2021-04-12T08:15:10.40Z") OR + (name = "example-policy-2") + order_by (str): + Sorts list results by a certain order. By default, returned + results are ordered by ``name`` in ascending order. You can + also sort results in descending order based on the ``name`` + value using ``orderBy="name desc"``. Currently, only + ordering by ``name`` is supported. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListNetworkPoliciesResponse(proto.Message): + r"""Response message for + [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + + Attributes: + network_policies (MutableSequence[google.cloud.vmwareengine_v1.types.NetworkPolicy]): + A list of network policies. + next_page_token (str): + A token, which can be send as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached when + making an aggregated query using wildcards. + """ + + @property + def raw_page(self): + return self + + network_policies: MutableSequence[ + vmwareengine_resources.NetworkPolicy + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.NetworkPolicy, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetNetworkPolicyRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + + Attributes: + name (str): + Required. The resource name of the network policy to + retrieve. Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. 
For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateNetworkPolicyRequest(proto.Message): + r"""Request message for + [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] + + Attributes: + network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): + Required. Network policy description. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the ``NetworkPolicy`` resource by the update. + The fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + network_policy: vmwareengine_resources.NetworkPolicy = proto.Field( + proto.MESSAGE, + number=1, + message=vmwareengine_resources.NetworkPolicy, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateNetworkPolicyRequest(proto.Message): + r"""Request message for + [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] + + Attributes: + parent (str): + Required. The resource name of the location (region) to + create the new network policy in. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: ``projects/my-project/locations/us-central1`` + network_policy_id (str): + Required. The user-provided identifier of the network policy + to be created. This identifier must be unique within parent + ``projects/{my-project}/locations/{us-central1}/networkPolicies`` + and becomes the final token in the name URI. The identifier + must meet the following requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): + Required. The network policy configuration to + use in the request. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + network_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + network_policy: vmwareengine_resources.NetworkPolicy = proto.Field( + proto.MESSAGE, + number=3, + message=vmwareengine_resources.NetworkPolicy, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteNetworkPolicyRequest(proto.Message): + r"""Request message for + [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] + + Attributes: + name (str): + Required. The resource name of the network policy to delete. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListManagementDnsZoneBindingsRequest(proto.Message): + r"""Request message for + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] + + Attributes: + parent (str): + Required. The resource name of the private cloud to be + queried for management DNS zone bindings. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + page_size (int): + The maximum number of management DNS zone + bindings to return in one page. The service may + return fewer than this value. The maximum value + is coerced to 1000. The default value of this field is 500. page_token (str): A page token, received from a previous - ``ListNetworkPolicies`` call. Provide this to retrieve the - subsequent page. + ``ListManagementDnsZoneBindings`` call. Provide this to + retrieve the subsequent page. When paginating, all other parameters provided to - ``ListNetworkPolicies`` must match the call that provided - the page token. + ``ListManagementDnsZoneBindings`` must match the call that + provided the page token. filter (str): A filter expression that matches resources returned in the response. The expression must specify the field name, a @@ -1320,16 +3262,17 @@ class ListNetworkPoliciesRequest(proto.Message): boolean. The comparison operator must be ``=``, ``!=``, ``>``, or ``<``. - For example, if you are filtering a list of network - policies, you can exclude the ones named ``example-policy`` - by specifying ``name != "example-policy"``. 
+ For example, if you are filtering a list of Management DNS + Zone Bindings, you can exclude the ones named + ``example-management-dns-zone-binding`` by specifying + ``name != "example-management-dns-zone-binding"``. To filter on multiple expressions, provide each separate expression within parentheses. For example: :: - (name = "example-policy") + (name = "example-management-dns-zone-binding") (createTime > "2021-04-12T08:15:10.40Z") By default, each expression is an ``AND`` expression. @@ -1338,9 +3281,9 @@ class ListNetworkPoliciesRequest(proto.Message): :: - (name = "example-policy-1") AND + (name = "example-management-dns-zone-binding-1") AND (createTime > "2021-04-12T08:15:10.40Z") OR - (name = "example-policy-2") + (name = "example-management-dns-zone-binding-2") order_by (str): Sorts list results by a certain order. By default, returned results are ordered by ``name`` in ascending order. You can @@ -1371,15 +3314,15 @@ class ListNetworkPoliciesRequest(proto.Message): ) -class ListNetworkPoliciesResponse(proto.Message): +class ListManagementDnsZoneBindingsResponse(proto.Message): r"""Response message for - [VmwareEngine.ListNetworkPolicies][google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies] + [VmwareEngine.ListManagementDnsZoneBindings][google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings] Attributes: - network_policies (MutableSequence[google.cloud.vmwareengine_v1.types.NetworkPolicy]): - A list of network policies. + management_dns_zone_bindings (MutableSequence[google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding]): + A list of management DNS zone bindings. next_page_token (str): - A token, which can be send as ``page_token`` to retrieve the + A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. 
unreachable (MutableSequence[str]): @@ -1391,12 +3334,12 @@ class ListNetworkPoliciesResponse(proto.Message): def raw_page(self): return self - network_policies: MutableSequence[ - vmwareengine_resources.NetworkPolicy + management_dns_zone_bindings: MutableSequence[ + vmwareengine_resources.ManagementDnsZoneBinding ] = proto.RepeatedField( proto.MESSAGE, number=1, - message=vmwareengine_resources.NetworkPolicy, + message=vmwareengine_resources.ManagementDnsZoneBinding, ) next_page_token: str = proto.Field( proto.STRING, @@ -1408,18 +3351,18 @@ def raw_page(self): ) -class GetNetworkPolicyRequest(proto.Message): +class GetManagementDnsZoneBindingRequest(proto.Message): r"""Request message for - [VmwareEngine.GetNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy] + [VmwareEngine.GetManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.GetManagementDnsZoneBinding] Attributes: name (str): - Required. The resource name of the network policy to - retrieve. Resource names are schemeless URIs that follow the - conventions in + Required. The resource name of the management DNS zone + binding to retrieve. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` """ name: str = proto.Field( @@ -1428,20 +3371,94 @@ class GetNetworkPolicyRequest(proto.Message): ) -class UpdateNetworkPolicyRequest(proto.Message): +class CreateManagementDnsZoneBindingRequest(proto.Message): + r"""Request message for [VmwareEngine.CreateManagementDnsZoneBindings][] + + Attributes: + parent (str): + Required. The resource name of the private cloud to create a + new management DNS zone binding for. 
Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud`` + management_dns_zone_binding (google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding): + Required. The initial values for a new + management DNS zone binding. + management_dns_zone_binding_id (str): + Required. The user-provided identifier of the + ``ManagementDnsZoneBinding`` resource to be created. This + identifier must be unique among ``ManagementDnsZoneBinding`` + resources within the parent and becomes the final token in + the name URI. The identifier must meet the following + requirements: + + - Only contains 1-63 alphanumeric characters and hyphens + - Begins with an alphabetical character + - Ends with a non-hyphen character + - Not formatted as a UUID + - Complies with `RFC + 1034 `__ + (section 3.5) + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + management_dns_zone_binding: vmwareengine_resources.ManagementDnsZoneBinding = ( + proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.ManagementDnsZoneBinding, + ) + ) + management_dns_zone_binding_id: str = proto.Field( + proto.STRING, + number=3, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateManagementDnsZoneBindingRequest(proto.Message): r"""Request message for - [VmwareEngine.UpdateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPolicy] + [VmwareEngine.UpdateManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.UpdateManagementDnsZoneBinding] Attributes: - network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): - Required. Network policy description. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to specify the fields to be - overwritten in the ``NetworkPolicy`` resource by the update. - The fields specified in the ``update_mask`` are relative to - the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not + overwritten in the ``ManagementDnsZoneBinding`` resource by + the update. The fields specified in the ``update_mask`` are + relative to the resource, not the full request. A field will + be overwritten if it is in the mask. If the user does not provide a mask then all fields will be overwritten. + management_dns_zone_binding (google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding): + Required. New values to update the management + DNS zone binding with. request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must @@ -1454,9 +3471,9 @@ class UpdateNetworkPolicyRequest(proto.Message): For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1465,50 +3482,36 @@ class UpdateNetworkPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). """ - network_policy: vmwareengine_resources.NetworkPolicy = proto.Field( - proto.MESSAGE, - number=1, - message=vmwareengine_resources.NetworkPolicy, - ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, - number=2, + number=1, message=field_mask_pb2.FieldMask, ) + management_dns_zone_binding: vmwareengine_resources.ManagementDnsZoneBinding = ( + proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.ManagementDnsZoneBinding, + ) + ) request_id: str = proto.Field( proto.STRING, number=3, ) -class CreateNetworkPolicyRequest(proto.Message): +class DeleteManagementDnsZoneBindingRequest(proto.Message): r"""Request message for - [VmwareEngine.CreateNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy] + [VmwareEngine.DeleteManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding] Attributes: - parent (str): - Required. The resource name of the location (region) to - create the new network policy in. Resource names are - schemeless URIs that follow the conventions in + name (str): + Required. The resource name of the management DNS zone + binding to delete. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For - example: ``projects/my-project/locations/us-central1`` - network_policy_id (str): - Required. The user-provided identifier of the network policy - to be created. 
This identifier must be unique within parent - ``projects/{my-project}/locations/{us-central1}/networkPolicies`` - and becomes the final token in the name URI. The identifier - must meet the following requirements: - - - Only contains 1-63 alphanumeric characters and hyphens - - Begins with an alphabetical character - - Ends with a non-hyphen character - - Not formatted as a UUID - - Complies with `RFC - 1034 `__ - (section 3.5) - network_policy (google.cloud.vmwareengine_v1.types.NetworkPolicy): - Required. The network policy configuration to - use in the request. + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must @@ -1521,9 +3524,9 @@ class CreateNetworkPolicyRequest(proto.Message): For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -1532,37 +3535,27 @@ class CreateNetworkPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). 
""" - parent: str = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - network_policy_id: str = proto.Field( - proto.STRING, - number=2, - ) - network_policy: vmwareengine_resources.NetworkPolicy = proto.Field( - proto.MESSAGE, - number=3, - message=vmwareengine_resources.NetworkPolicy, - ) request_id: str = proto.Field( proto.STRING, - number=4, + number=2, ) -class DeleteNetworkPolicyRequest(proto.Message): - r"""Request message for - [VmwareEngine.DeleteNetworkPolicy][google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy] +class RepairManagementDnsZoneBindingRequest(proto.Message): + r"""Request message for [VmwareEngine.RepairManagementDnsZoneBindings][] Attributes: name (str): - Required. The resource name of the network policy to delete. - Resource names are schemeless URIs that follow the - conventions in + Required. The resource name of the management DNS zone + binding to repair. Resource names are schemeless URIs that + follow the conventions in https://cloud.google.com/apis/design/resource_names. For example: - ``projects/my-project/locations/us-central1/networkPolicies/my-network-policy`` + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` request_id (str): Optional. A request ID to identify requests. Specify a unique request ID so that if you must @@ -1575,9 +3568,9 @@ class DeleteNetworkPolicyRequest(proto.Message): For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This + ID, the server can check if the original + operation with the same request ID was received, + and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
@@ -2298,4 +4291,140 @@ def raw_page(self): ) +class GrantDnsBindPermissionRequest(proto.Message): + r"""Request message for + [VmwareEngine.GrantDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GrantDnsBindPermission] + + Attributes: + name (str): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to the + corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/dnsBindPermission`` + principal (google.cloud.vmwareengine_v1.types.Principal): + Required. The consumer provided user/service + account which needs to be granted permission to + bind with the intranet VPC corresponding to the + consumer project. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + principal: vmwareengine_resources.Principal = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.Principal, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class RevokeDnsBindPermissionRequest(proto.Message): + r"""Request message for + [VmwareEngine.RevokeDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.RevokeDnsBindPermission] + + Attributes: + name (str): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to the + corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/dnsBindPermission`` + principal (google.cloud.vmwareengine_v1.types.Principal): + Required. The consumer provided user/service + account which needs to be granted permission to + bind with the intranet VPC corresponding to the + consumer project. + request_id (str): + Optional. A request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. The server guarantees that a request + doesn't result in creation of duplicate + commitments for at least 60 minutes. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + principal: vmwareengine_resources.Principal = proto.Field( + proto.MESSAGE, + number=2, + message=vmwareengine_resources.Principal, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetDnsBindPermissionRequest(proto.Message): + r"""Request message for + [VmwareEngine.GetDnsBindPermission][google.cloud.vmwareengine.v1.VmwareEngine.GetDnsBindPermission] + + Attributes: + name (str): + Required. The name of the resource which stores the + users/service accounts having the permission to bind to the + corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource. Resource names are + schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/dnsBindPermission`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py index 631662d0c35a..ce7410825dbc 100644 --- a/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py +++ b/packages/google-cloud-vmwareengine/google/cloud/vmwareengine_v1/types/vmwareengine_resources.py @@ -25,19 +25,30 @@ manifest={ "NetworkConfig", "NodeTypeConfig", + "StretchedClusterConfig", "PrivateCloud", "Cluster", + "Node", + "ExternalAddress", "Subnet", + "ExternalAccessRule", + "LoggingServer", "NodeType", "Credentials", "HcxActivationKey", "Hcx", "Nsx", "Vcenter", + "DnsForwarding", + "NetworkPeering", "PeeringRoute", "NetworkPolicy", + "ManagementDnsZoneBinding", "VmwareEngineNetwork", "PrivateConnection", + "LocationMetadata", + "DnsBindPermission", + "Principal", }, ) @@ -73,6 +84,12 @@ class 
NetworkConfig(proto.Message): latest IP address layout used by all newly created private clouds. This version supports all current features. + dns_server_ip (str): + Output only. DNS Server IP of the Private + Cloud. All DNS queries can be forwarded to this + address for name resolution of Private Cloud's + management entities like vCenter, NSX-T Manager + and ESXi hosts. """ management_cidr: str = proto.Field( @@ -91,6 +108,10 @@ class NetworkConfig(proto.Message): proto.INT32, number=8, ) + dns_server_ip: str = proto.Field( + proto.STRING, + number=9, + ) class NodeTypeConfig(proto.Message): @@ -119,9 +140,40 @@ class NodeTypeConfig(proto.Message): ) +class StretchedClusterConfig(proto.Message): + r"""Configuration of a stretched cluster. + + Attributes: + preferred_location (str): + Required. Zone that will remain operational when connection + between the two zones is lost. Specify the resource name of + a zone that belongs to the region of the private cloud. For + example: ``projects/{project}/locations/europe-west3-a`` + where ``{project}`` can either be a project number or a + project ID. + secondary_location (str): + Required. Additional zone for a higher level of availability + and load balancing. Specify the resource name of a zone that + belongs to the region of the private cloud. For example: + ``projects/{project}/locations/europe-west3-b`` where + ``{project}`` can either be a project number or a project + ID. + """ + + preferred_location: str = proto.Field( + proto.STRING, + number=1, + ) + secondary_location: str = proto.Field( + proto.STRING, + number=2, + ) + + class PrivateCloud(proto.Message): - r"""Represents a private cloud resource. Private clouds are zonal - resources. + r"""Represents a private cloud resource. Private clouds of type + ``STANDARD`` and ``TIME_LIMITED`` are zonal resources, ``STRETCHED`` + private clouds are regional. Attributes: name (str): @@ -218,9 +270,14 @@ class Type(proto.Enum): life span. 
Will be deleted after defined period of time, can be converted into standard private cloud by expanding it up to 3 or more nodes. + STRETCHED (2): + Stretched private cloud is a regional + resource with redundancy, with a minimum of 6 + nodes, nodes count has to be even. """ STANDARD = 0 TIME_LIMITED = 1 + STRETCHED = 2 class ManagementCluster(proto.Message): r"""Management cluster configuration. @@ -242,6 +299,9 @@ class ManagementCluster(proto.Message): Required. The map of cluster node types in this cluster, where the key is canonical identifier of the node type (corresponds to the ``NodeType``). + stretched_cluster_config (google.cloud.vmwareengine_v1.types.StretchedClusterConfig): + Optional. Configuration of a stretched + cluster. Required for STRETCHED private clouds. """ cluster_id: str = proto.Field( @@ -254,6 +314,11 @@ class ManagementCluster(proto.Message): number=7, message="NodeTypeConfig", ) + stretched_cluster_config: "StretchedClusterConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="StretchedClusterConfig", + ) name: str = proto.Field( proto.STRING, @@ -353,6 +418,10 @@ class Cluster(proto.Message): Required. The map of cluster node types in this cluster, where the key is canonical identifier of the node type (corresponds to the ``NodeType``). + stretched_cluster_config (google.cloud.vmwareengine_v1.types.StretchedClusterConfig): + Optional. Configuration of a stretched + cluster. Required for clusters that belong to a + STRETCHED private cloud. """ class State(proto.Enum): @@ -416,6 +485,183 @@ class State(proto.Enum): number=16, message="NodeTypeConfig", ) + stretched_cluster_config: "StretchedClusterConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="StretchedClusterConfig", + ) + + +class Node(proto.Message): + r"""Node in a cluster. + + Attributes: + name (str): + Output only. The resource name of this node. 
Resource names + are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + projects/my-project/locations/us-central1-a/privateClouds/my-cloud/clusters/my-cluster/nodes/my-node + fqdn (str): + Output only. Fully qualified domain name of + the node. + internal_ip (str): + Output only. Internal IP address of the node. + node_type_id (str): + Output only. The canonical identifier of the node type + (corresponds to the ``NodeType``). For example: standard-72. + version (str): + Output only. The version number of the VMware + ESXi management component in this cluster. + custom_core_count (int): + Output only. Customized number of cores + state (google.cloud.vmwareengine_v1.types.Node.State): + Output only. The state of the appliance. + """ + + class State(proto.Enum): + r"""Enum State defines possible states of a node in a cluster. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value should never be + used. + ACTIVE (1): + Node is operational and can be used by the + user. + CREATING (2): + Node is being provisioned. + FAILED (3): + Node is in a failed state. + UPGRADING (4): + Node is undergoing maintenance, e.g.: during + private cloud upgrade. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + FAILED = 3 + UPGRADING = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + fqdn: str = proto.Field( + proto.STRING, + number=2, + ) + internal_ip: str = proto.Field( + proto.STRING, + number=3, + ) + node_type_id: str = proto.Field( + proto.STRING, + number=4, + ) + version: str = proto.Field( + proto.STRING, + number=5, + ) + custom_core_count: int = proto.Field( + proto.INT64, + number=6, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + + +class ExternalAddress(proto.Message): + r"""Represents an allocated external IP address and its + corresponding internal IP address in a private cloud. + + Attributes: + name (str): + Output only. 
The resource name of this external IP address. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-address`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + internal_ip (str): + The internal IP address of a workload VM. + external_ip (str): + Output only. The external IP address of a + workload VM. + state (google.cloud.vmwareengine_v1.types.ExternalAddress.State): + Output only. The state of the resource. + uid (str): + Output only. System-generated unique + identifier for the resource. + description (str): + User-provided description for this resource. + """ + + class State(proto.Enum): + r"""Enum State defines possible states of external addresses. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value should never be + used. + ACTIVE (1): + The address is ready. + CREATING (2): + The address is being created. + UPDATING (3): + The address is being updated. + DELETING (4): + The address is being deleted. 
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + UPDATING = 3 + DELETING = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + internal_ip: str = proto.Field( + proto.STRING, + number=6, + ) + external_ip: str = proto.Field( + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + uid: str = proto.Field( + proto.STRING, + number=9, + ) + description: str = proto.Field( + proto.STRING, + number=11, + ) class Subnet(proto.Message): @@ -441,6 +687,9 @@ class Subnet(proto.Message): example "management" or "userDefined". state (google.cloud.vmwareengine_v1.types.Subnet.State): Output only. The state of the resource. + vlan_id (int): + Output only. VLAN ID of the VLAN on which the + subnet is configured """ class State(proto.Enum): @@ -495,6 +744,339 @@ class State(proto.Enum): number=13, enum=State, ) + vlan_id: int = proto.Field( + proto.INT32, + number=16, + ) + + +class ExternalAccessRule(proto.Message): + r"""External access firewall rules for filtering incoming traffic + destined to ``ExternalAddress`` resources. + + Attributes: + name (str): + Output only. The resource name of this external access rule. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + description (str): + User-provided description for this external + access rule. 
+ priority (int): + External access rule priority, which determines the external + access rule to use when multiple rules apply. If multiple + rules have the same priority, their ordering is + non-deterministic. If specific ordering is required, assign + unique priorities to enforce such ordering. The external + access rule priority is an integer from 100 to 4096, both + inclusive. Lower integers indicate higher precedence. For + example, a rule with priority ``100`` has higher precedence + than a rule with priority ``101``. + action (google.cloud.vmwareengine_v1.types.ExternalAccessRule.Action): + The action that the external access rule + performs. + ip_protocol (str): + The IP protocol to which the external access rule applies. + This value can be one of the following three protocol + strings (not case-sensitive): ``tcp``, ``udp``, or ``icmp``. + source_ip_ranges (MutableSequence[google.cloud.vmwareengine_v1.types.ExternalAccessRule.IpRange]): + If source ranges are specified, the external access rule + applies only to traffic that has a source IP address in + these ranges. These ranges can either be expressed in the + CIDR format or as an IP address. As only inbound rules are + supported, ``ExternalAddress`` resources cannot be the + source IP addresses of an external access rule. To match all + source addresses, specify ``0.0.0.0/0``. + source_ports (MutableSequence[str]): + A list of source ports to which the external access rule + applies. This field is only applicable for the UDP or TCP + protocol. Each entry must be either an integer or a range. + For example: ``["22"]``, ``["80","443"]``, or + ``["12345-12349"]``. To match all source ports, specify + ``["0-65535"]``. + destination_ip_ranges (MutableSequence[google.cloud.vmwareengine_v1.types.ExternalAccessRule.IpRange]): + If destination ranges are specified, the external access + rule applies only to the traffic that has a destination IP + address in these ranges. 
The specified IP addresses must + have reserved external IP addresses in the scope of the + parent network policy. To match all external IP addresses in + the scope of the parent network policy, specify + ``0.0.0.0/0``. To match a specific external IP address, + specify it using the ``IpRange.external_address`` property. + destination_ports (MutableSequence[str]): + A list of destination ports to which the external access + rule applies. This field is only applicable for the UDP or + TCP protocol. Each entry must be either an integer or a + range. For example: ``["22"]``, ``["80","443"]``, or + ``["12345-12349"]``. To match all destination ports, specify + ``["0-65535"]``. + state (google.cloud.vmwareengine_v1.types.ExternalAccessRule.State): + Output only. The state of the resource. + uid (str): + Output only. System-generated unique + identifier for the resource. + """ + + class Action(proto.Enum): + r"""Action determines whether the external access rule permits or + blocks traffic, subject to the other components of the rule + matching the traffic. + + Values: + ACTION_UNSPECIFIED (0): + Defaults to allow. + ALLOW (1): + Allows connections that match the other + specified components. + DENY (2): + Blocks connections that match the other + specified components. + """ + ACTION_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + class State(proto.Enum): + r"""Defines possible states of external access firewall rules. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + ACTIVE (1): + The rule is ready. + CREATING (2): + The rule is being created. + UPDATING (3): + The rule is being updated. + DELETING (4): + The rule is being deleted. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + UPDATING = 3 + DELETING = 4 + + class IpRange(proto.Message): + r"""An IP range provided in any one of the supported formats. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_address (str): + A single IP address. For example: ``10.0.0.5``. + + This field is a member of `oneof`_ ``ip_range``. + ip_address_range (str): + An IP address range in the CIDR format. For example: + ``10.0.0.0/24``. + + This field is a member of `oneof`_ ``ip_range``. + external_address (str): + The name of an ``ExternalAddress`` resource. The external + address must have been reserved in the scope of this + external access rule's parent network policy. Provide the + external address name in the form of + ``projects/{project}/locations/{location}/privateClouds/{private_cloud}/externalAddresses/{external_address}``. + For example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/externalAddresses/my-address``. + + This field is a member of `oneof`_ ``ip_range``. 
+ """ + + ip_address: str = proto.Field( + proto.STRING, + number=1, + oneof="ip_range", + ) + ip_address_range: str = proto.Field( + proto.STRING, + number=2, + oneof="ip_range", + ) + external_address: str = proto.Field( + proto.STRING, + number=3, + oneof="ip_range", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + priority: int = proto.Field( + proto.INT32, + number=6, + ) + action: Action = proto.Field( + proto.ENUM, + number=7, + enum=Action, + ) + ip_protocol: str = proto.Field( + proto.STRING, + number=8, + ) + source_ip_ranges: MutableSequence[IpRange] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=IpRange, + ) + source_ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + destination_ip_ranges: MutableSequence[IpRange] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=IpRange, + ) + destination_ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + state: State = proto.Field( + proto.ENUM, + number=13, + enum=State, + ) + uid: str = proto.Field( + proto.STRING, + number=14, + ) + + +class LoggingServer(proto.Message): + r"""Logging server to receive vCenter or ESXi logs. + + Attributes: + name (str): + Output only. The resource name of this logging server. + Resource names are schemeless URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/loggingServers/my-logging-server`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + hostname (str): + Required. Fully-qualified domain name (FQDN) + or IP Address of the logging server. + port (int): + Required. Port number at which the logging + server receives logs. + protocol (google.cloud.vmwareengine_v1.types.LoggingServer.Protocol): + Required. Protocol used by vCenter to send + logs to a logging server. + source_type (google.cloud.vmwareengine_v1.types.LoggingServer.SourceType): + Required. The type of component that produces + logs that will be forwarded to this logging + server. + uid (str): + Output only. System-generated unique + identifier for the resource. + """ + + class Protocol(proto.Enum): + r"""Defines possible protocols used to send logs to + a logging server. + + Values: + PROTOCOL_UNSPECIFIED (0): + Unspecified communications protocol. This is + the default value. + UDP (1): + UDP + TCP (2): + TCP + """ + PROTOCOL_UNSPECIFIED = 0 + UDP = 1 + TCP = 2 + + class SourceType(proto.Enum): + r"""Defines possible types of component that produces logs. + + Values: + SOURCE_TYPE_UNSPECIFIED (0): + The default value. This value should never be + used. 
+ ESXI (1): + Logs produced by ESXI hosts + VCSA (2): + Logs produced by vCenter server + """ + SOURCE_TYPE_UNSPECIFIED = 0 + ESXI = 1 + VCSA = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + hostname: str = proto.Field( + proto.STRING, + number=5, + ) + port: int = proto.Field( + proto.INT32, + number=7, + ) + protocol: Protocol = proto.Field( + proto.ENUM, + number=6, + enum=Protocol, + ) + source_type: SourceType = proto.Field( + proto.ENUM, + number=10, + enum=SourceType, + ) + uid: str = proto.Field( + proto.STRING, + number=8, + ) class NodeType(proto.Message): @@ -528,8 +1110,45 @@ class NodeType(proto.Message): available_custom_core_counts (MutableSequence[int]): Output only. List of possible values of custom core count. + kind (google.cloud.vmwareengine_v1.types.NodeType.Kind): + Output only. The type of the resource. + families (MutableSequence[str]): + Output only. Families of the node type. For node types to be + in the same cluster they must share at least one element in + the ``families``. + capabilities (MutableSequence[google.cloud.vmwareengine_v1.types.NodeType.Capability]): + Output only. Capabilities of this node type. """ + class Kind(proto.Enum): + r"""Enum Kind defines possible types of a NodeType. + + Values: + KIND_UNSPECIFIED (0): + The default value. This value should never be + used. + STANDARD (1): + Standard HCI node. + STORAGE_ONLY (2): + Storage only Node. + """ + KIND_UNSPECIFIED = 0 + STANDARD = 1 + STORAGE_ONLY = 2 + + class Capability(proto.Enum): + r"""Capability of a node type. + + Values: + CAPABILITY_UNSPECIFIED (0): + The default value. This value is used if the + capability is omitted or unknown. + STRETCHED_CLUSTERS (1): + This node type supports stretch clusters. 
+ """ + CAPABILITY_UNSPECIFIED = 0 + STRETCHED_CLUSTERS = 1 + name: str = proto.Field( proto.STRING, number=1, @@ -562,6 +1181,20 @@ class NodeType(proto.Message): proto.INT32, number=11, ) + kind: Kind = proto.Field( + proto.ENUM, + number=12, + enum=Kind, + ) + families: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + capabilities: MutableSequence[Capability] = proto.RepeatedField( + proto.ENUM, + number=14, + enum=Capability, + ) class Credentials(proto.Message): @@ -735,71 +1368,375 @@ class State(proto.Enum): ACTIVE = 1 CREATING = 2 - internal_ip: str = proto.Field( + internal_ip: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + fqdn: str = proto.Field( + proto.STRING, + number=6, + ) + + +class Vcenter(proto.Message): + r"""Details about a vCenter Server management appliance. + + Attributes: + internal_ip (str): + Internal IP address of the appliance. + version (str): + Version of the appliance. + state (google.cloud.vmwareengine_v1.types.Vcenter.State): + Output only. The state of the appliance. + fqdn (str): + Fully qualified domain name of the appliance. + """ + + class State(proto.Enum): + r"""State of the appliance + + Values: + STATE_UNSPECIFIED (0): + Unspecified appliance state. This is the + default value. + ACTIVE (1): + The appliance is operational and can be used. + CREATING (2): + The appliance is being deployed. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + + internal_ip: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + fqdn: str = proto.Field( + proto.STRING, + number=6, + ) + + +class DnsForwarding(proto.Message): + r"""DNS forwarding config. 
+ This config defines a list of domain to name server mappings, + and is attached to the private cloud for custom domain + resolution. + + Attributes: + name (str): + Output only. The resource name of this DNS profile. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/dnsForwarding`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + forwarding_rules (MutableSequence[google.cloud.vmwareengine_v1.types.DnsForwarding.ForwardingRule]): + Required. List of domain mappings to + configure + """ + + class ForwardingRule(proto.Message): + r"""A forwarding rule is a mapping of a ``domain`` to ``name_servers``. + This mapping allows VMware Engine to resolve domains for attached + private clouds by forwarding DNS requests for a given domain to the + specified nameservers. + + Attributes: + domain (str): + Required. Domain used to resolve a ``name_servers`` list. + name_servers (MutableSequence[str]): + Required. 
List of DNS servers to use for + domain resolution + """ + + domain: str = proto.Field( + proto.STRING, + number=1, + ) + name_servers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + name: str = proto.Field( proto.STRING, - number=2, + number=1, ) - version: str = proto.Field( - proto.STRING, - number=4, + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, ) - fqdn: str = proto.Field( - proto.STRING, - number=6, + forwarding_rules: MutableSequence[ForwardingRule] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ForwardingRule, ) -class Vcenter(proto.Message): - r"""Details about a vCenter Server management appliance. +class NetworkPeering(proto.Message): + r"""Details of a network peering. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - internal_ip (str): - Internal IP address of the appliance. - version (str): - Version of the appliance. - state (google.cloud.vmwareengine_v1.types.Vcenter.State): - Output only. The state of the appliance. - fqdn (str): - Fully qualified domain name of the appliance. + name (str): + Output only. The resource name of the network peering. + NetworkPeering is a global resource and location can only be + global. Resource names are scheme-less URIs that follow the + conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/networkPeerings/my-peering`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + peer_network (str): + Required. 
The relative resource name of the network to peer + with a standard VMware Engine network. The provided network + can be a consumer VPC network or another standard VMware + Engine network. If the ``peer_network_type`` is + VMWARE_ENGINE_NETWORK, specify the name in the form: + ``projects/{project}/locations/global/vmwareEngineNetworks/{vmware_engine_network_id}``. + Otherwise specify the name in the form: + ``projects/{project}/global/networks/{network_id}``, where + ``{project}`` can either be a project number or a project + ID. + export_custom_routes (bool): + Optional. True if custom routes are exported + to the peered network; false otherwise. The + default value is true. + + This field is a member of `oneof`_ ``_export_custom_routes``. + import_custom_routes (bool): + Optional. True if custom routes are imported + from the peered network; false otherwise. The + default value is true. + + This field is a member of `oneof`_ ``_import_custom_routes``. + exchange_subnet_routes (bool): + Optional. True if full mesh connectivity is + created and managed automatically between peered + networks; false otherwise. Currently this field + is always true because Google Compute Engine + automatically creates and manages subnetwork + routes between two VPC networks when peering + state is 'ACTIVE'. + + This field is a member of `oneof`_ ``_exchange_subnet_routes``. + export_custom_routes_with_public_ip (bool): + Optional. True if all subnet routes with a public IP address + range are exported; false otherwise. The default value is + true. IPv4 special-use ranges + (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are + always exported to peers and are not controlled by this + field. + + This field is a member of `oneof`_ ``_export_custom_routes_with_public_ip``. + import_custom_routes_with_public_ip (bool): + Optional. True if all subnet routes with public IP address + range are imported; false otherwise. The default value is + true. 
IPv4 special-use ranges + (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are + always imported to peers and are not controlled by this + field. + + This field is a member of `oneof`_ ``_import_custom_routes_with_public_ip``. + state (google.cloud.vmwareengine_v1.types.NetworkPeering.State): + Output only. State of the network peering. + This field has a value of 'ACTIVE' when there's + a matching configuration in the peer network. + New values may be added to this enum when + appropriate. + state_details (str): + Output only. Output Only. Details about the + current state of the network peering. + peer_mtu (int): + Optional. Maximum transmission unit (MTU) in bytes. The + default value is ``1500``. If a value of ``0`` is provided + for this field, VMware Engine uses the default value + instead. + peer_network_type (google.cloud.vmwareengine_v1.types.NetworkPeering.PeerNetworkType): + Required. The type of the network to peer + with the VMware Engine network. + uid (str): + Output only. System-generated unique + identifier for the resource. + vmware_engine_network (str): + Required. The relative resource name of the VMware Engine + network. Specify the name in the following form: + ``projects/{project}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network_id}`` + where ``{project}`` can either be a project number or a + project ID. + description (str): + Optional. User-provided description for this + network peering. """ class State(proto.Enum): - r"""State of the appliance + r"""Possible states of a network peering. Values: STATE_UNSPECIFIED (0): - Unspecified appliance state. This is the - default value. - ACTIVE (1): - The appliance is operational and can be used. - CREATING (2): - The appliance is being deployed. + Unspecified network peering state. This is + the default value. + INACTIVE (1): + The peering is not active. + ACTIVE (2): + The peering is active. + CREATING (3): + The peering is being created. 
+ DELETING (4): + The peering is being deleted. """ STATE_UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 + INACTIVE = 1 + ACTIVE = 2 + CREATING = 3 + DELETING = 4 - internal_ip: str = proto.Field( + class PeerNetworkType(proto.Enum): + r"""Type or purpose of the network peering connection. + + Values: + PEER_NETWORK_TYPE_UNSPECIFIED (0): + Unspecified + STANDARD (1): + Peering connection used for connecting to + another VPC network established by the same + user. For example, a peering connection to + another VPC network in the same project or to an + on-premises network. + VMWARE_ENGINE_NETWORK (2): + Peering connection used for connecting to + another VMware Engine network. + PRIVATE_SERVICES_ACCESS (3): + Peering connection used for establishing `private services + access `__. + NETAPP_CLOUD_VOLUMES (4): + Peering connection used for connecting to + NetApp Cloud Volumes. + THIRD_PARTY_SERVICE (5): + Peering connection used for connecting to + third-party services. Most third-party services + require manual setup of reverse peering on the + VPC network associated with the third-party + service. 
+ DELL_POWERSCALE (6): + Peering connection used for connecting to + Dell PowerScale Filers + """ + PEER_NETWORK_TYPE_UNSPECIFIED = 0 + STANDARD = 1 + VMWARE_ENGINE_NETWORK = 2 + PRIVATE_SERVICES_ACCESS = 3 + NETAPP_CLOUD_VOLUMES = 4 + THIRD_PARTY_SERVICE = 5 + DELL_POWERSCALE = 6 + + name: str = proto.Field( proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, number=2, + message=timestamp_pb2.Timestamp, ) - version: str = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + peer_network: str = proto.Field( proto.STRING, - number=4, + number=5, + ) + export_custom_routes: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + import_custom_routes: bool = proto.Field( + proto.BOOL, + number=9, + optional=True, + ) + exchange_subnet_routes: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + export_custom_routes_with_public_ip: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + import_custom_routes_with_public_ip: bool = proto.Field( + proto.BOOL, + number=12, + optional=True, ) state: State = proto.Field( proto.ENUM, - number=5, + number=13, enum=State, ) - fqdn: str = proto.Field( + state_details: str = proto.Field( proto.STRING, - number=6, + number=7, + ) + peer_mtu: int = proto.Field( + proto.INT32, + number=14, + ) + peer_network_type: PeerNetworkType = proto.Field( + proto.ENUM, + number=16, + enum=PeerNetworkType, + ) + uid: str = proto.Field( + proto.STRING, + number=17, + ) + vmware_engine_network: str = proto.Field( + proto.STRING, + number=20, + ) + description: str = proto.Field( + proto.STRING, + number=21, ) @@ -1051,6 +1988,123 @@ class State(proto.Enum): ) +class ManagementDnsZoneBinding(proto.Message): + r"""Represents a binding between a network and the management DNS + zone. 
A management DNS zone is the Cloud DNS cross-project + binding zone that VMware Engine creates for each private cloud. + It contains FQDNs and corresponding IP addresses for the private + cloud's ESXi hosts and management VM appliances like vCenter and + NSX Manager. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The resource name of this binding. Resource + names are schemeless URIs that follow the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of this resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of this + resource. + state (google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding.State): + Output only. The state of the resource. + description (str): + User-provided description for this resource. + vpc_network (str): + Network to bind is a standard consumer VPC. Specify the name + in the following form for consumer VPC network: + ``projects/{project}/global/networks/{network_id}``. + ``{project}`` can either be a project number or a project + ID. + + This field is a member of `oneof`_ ``bind_network``. + vmware_engine_network (str): + Network to bind is a VMware Engine network. Specify the name + in the following form for VMware engine network: + ``projects/{project}/locations/global/vmwareEngineNetworks/{vmware_engine_network_id}``. + ``{project}`` can either be a project number or a project + ID. 
+ + This field is a member of `oneof`_ ``bind_network``. + uid (str): + Output only. System-generated unique + identifier for the resource. + """ + + class State(proto.Enum): + r"""Enum State defines possible states of binding between the + consumer VPC network and the management DNS zone. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value should never be + used. + ACTIVE (1): + The binding is ready. + CREATING (2): + The binding is being created. + UPDATING (3): + The binding is being updated. + DELETING (4): + The binding is being deleted. + FAILED (5): + The binding has failed. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + UPDATING = 3 + DELETING = 4 + FAILED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + description: str = proto.Field( + proto.STRING, + number=13, + ) + vpc_network: str = proto.Field( + proto.STRING, + number=14, + oneof="bind_network", + ) + vmware_engine_network: str = proto.Field( + proto.STRING, + number=15, + oneof="bind_network", + ) + uid: str = proto.Field( + proto.STRING, + number=9, + ) + + class VmwareEngineNetwork(proto.Message): r"""VMware Engine network resource that provides connectivity for VMware Engine private clouds. @@ -1126,9 +2180,13 @@ class Type(proto.Enum): without a network of type ``STANDARD``. This network type is no longer used for new VMware Engine private cloud deployments. + STANDARD (2): + Standard network type used for private cloud + connectivity. 
""" TYPE_UNSPECIFIED = 0 LEGACY = 1 + STANDARD = 2 class VpcNetwork(proto.Message): r"""Represents a VMware Engine VPC network that is managed by a @@ -1444,4 +2502,104 @@ class PeeringState(proto.Enum): ) +class LocationMetadata(proto.Message): + r"""VmwareEngine specific metadata for the given + [google.cloud.location.Location][google.cloud.location.Location]. It + is returned as a content of the + ``google.cloud.location.Location.metadata`` field. + + Attributes: + capabilities (MutableSequence[google.cloud.vmwareengine_v1.types.LocationMetadata.Capability]): + Output only. Capabilities of this location. + """ + + class Capability(proto.Enum): + r"""Capability of a location. + + Values: + CAPABILITY_UNSPECIFIED (0): + The default value. This value is used if the + capability is omitted or unknown. + STRETCHED_CLUSTERS (1): + Stretch clusters are supported in this + location. + """ + CAPABILITY_UNSPECIFIED = 0 + STRETCHED_CLUSTERS = 1 + + capabilities: MutableSequence[Capability] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=Capability, + ) + + +class DnsBindPermission(proto.Message): + r"""DnsBindPermission resource that contains the accounts having + the consumer DNS bind permission on the corresponding intranet + VPC of the consumer project. + + Attributes: + name (str): + Required. Output only. The name of the resource which stores + the users/service accounts having the permission to bind to + the corresponding intranet VPC of the consumer project. + DnsBindPermission is a global resource and location can only + be global. Resource names are schemeless URIs that follow + the conventions in + https://cloud.google.com/apis/design/resource_names. For + example: + ``projects/my-project/locations/global/dnsBindPermission`` + principals (MutableSequence[google.cloud.vmwareengine_v1.types.Principal]): + Output only. Users/Service accounts which + have access for binding on the intranet VPC + project corresponding to the consumer project. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + principals: MutableSequence["Principal"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Principal", + ) + + +class Principal(proto.Message): + r"""Users/Service accounts which have access for DNS binding on + the intranet VPC corresponding to the consumer project. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + user (str): + The user who needs to be granted permission. + + This field is a member of `oneof`_ ``principal``. + service_account (str): + The service account which needs to be granted + the permission. + + This field is a member of `oneof`_ ``principal``. + """ + + user: str = proto.Field( + proto.STRING, + number=1, + oneof="principal", + ) + service_account: str = proto.Field( + proto.STRING, + number=2, + oneof="principal", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-vmwareengine/noxfile.py b/packages/google-cloud-vmwareengine/noxfile.py index 7d3551347c78..1e6cd48d0529 100644 --- a/packages/google-cloud-vmwareengine/noxfile.py +++ b/packages/google-cloud-vmwareengine/noxfile.py @@ -282,6 +282,15 @@ def docs(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", "alabaster", "recommonmark", @@ -308,6 +317,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json b/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json index 0f01a51ff0a1..ec46d5ac74fd 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/snippet_metadata_google.cloud.vmwareengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-vmwareengine", - "version": "1.2.0" + "version": "1.3.0" }, "snippets": [ { @@ -196,30 +196,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_hcx_activation_key", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_external_access_rule", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAccessRule", "service": { "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreateHcxActivationKey" + "shortName": "CreateExternalAccessRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateExternalAccessRuleRequest" }, { "name": "parent", "type": "str" }, { - "name": "hcx_activation_key", - "type": "google.cloud.vmwareengine_v1.types.HcxActivationKey" + "name": "external_access_rule", + "type": "google.cloud.vmwareengine_v1.types.ExternalAccessRule" }, { - "name": "hcx_activation_key_id", + "name": "external_access_rule_id", "type": "str" }, { @@ -236,13 +236,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_hcx_activation_key" + "shortName": "create_external_access_rule" }, - "description": "Sample for CreateHcxActivationKey", - "file": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_async.py", + "description": "Sample for CreateExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_create_external_access_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateHcxActivationKey_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateExternalAccessRule_async", "segments": [ { "end": 56, @@ -275,7 +275,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_external_access_rule_async.py" }, { "canonical": true, @@ -284,30 +284,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_hcx_activation_key", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_external_access_rule", "method": { - "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAccessRule", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreateHcxActivationKey" + "shortName": "CreateExternalAccessRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateExternalAccessRuleRequest" }, { "name": "parent", "type": "str" }, { - "name": "hcx_activation_key", - "type": "google.cloud.vmwareengine_v1.types.HcxActivationKey" + "name": "external_access_rule", + "type": "google.cloud.vmwareengine_v1.types.ExternalAccessRule" }, { - "name": "hcx_activation_key_id", + "name": "external_access_rule_id", "type": "str" }, { @@ -324,13 +324,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_hcx_activation_key" + "shortName": "create_external_access_rule" }, - "description": "Sample for CreateHcxActivationKey", - "file": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_sync.py", + "description": "Sample for CreateExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_create_external_access_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateHcxActivationKey_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateExternalAccessRule_sync", "segments": [ { "end": 56, @@ -363,7 +363,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_external_access_rule_sync.py" }, { "canonical": true, @@ -373,30 +373,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_network_policy", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_external_address", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAddress", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreateNetworkPolicy" + "shortName": "CreateExternalAddress" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateExternalAddressRequest" }, { "name": "parent", "type": "str" }, { - "name": "network_policy", - "type": "google.cloud.vmwareengine_v1.types.NetworkPolicy" + "name": "external_address", + "type": "google.cloud.vmwareengine_v1.types.ExternalAddress" }, { - "name": "network_policy_id", + "name": "external_address_id", "type": "str" }, { @@ -413,21 +413,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_network_policy" + "shortName": "create_external_address" }, - "description": "Sample for CreateNetworkPolicy", - "file": "vmwareengine_v1_generated_vmware_engine_create_network_policy_async.py", + "description": "Sample for CreateExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_create_external_address_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateNetworkPolicy_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateExternalAddress_async", "segments": [ { - "end": 60, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 56, "start": 27, "type": "SHORT" }, @@ -437,22 +437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 53, 
+ "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_network_policy_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_external_address_async.py" }, { "canonical": true, @@ -461,30 +461,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_network_policy", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_external_address", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateExternalAddress", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreateNetworkPolicy" + "shortName": "CreateExternalAddress" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateExternalAddressRequest" }, { "name": "parent", "type": "str" }, { - "name": "network_policy", - "type": "google.cloud.vmwareengine_v1.types.NetworkPolicy" + "name": "external_address", + "type": "google.cloud.vmwareengine_v1.types.ExternalAddress" }, { - "name": "network_policy_id", + "name": "external_address_id", "type": "str" }, { @@ -501,21 +501,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_network_policy" + "shortName": "create_external_address" }, - "description": "Sample for CreateNetworkPolicy", - "file": "vmwareengine_v1_generated_vmware_engine_create_network_policy_sync.py", + "description": "Sample for CreateExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_create_external_address_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_CreateNetworkPolicy_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateExternalAddress_sync", "segments": [ { - "end": 60, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 56, "start": 27, "type": "SHORT" }, @@ -525,22 +525,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_network_policy_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_external_address_sync.py" }, { "canonical": true, @@ -550,30 +550,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_hcx_activation_key", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreatePrivateCloud" + "shortName": "CreateHcxActivationKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreatePrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest" }, { "name": "parent", "type": "str" }, { - "name": "private_cloud", - "type": "google.cloud.vmwareengine_v1.types.PrivateCloud" + "name": "hcx_activation_key", + "type": "google.cloud.vmwareengine_v1.types.HcxActivationKey" }, { - "name": "private_cloud_id", + "name": "hcx_activation_key_id", "type": "str" }, { @@ -590,21 +590,21 @@ } ], 
"resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_private_cloud" + "shortName": "create_hcx_activation_key" }, - "description": "Sample for CreatePrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py", + "description": "Sample for CreateHcxActivationKey", + "file": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateCloud_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateHcxActivationKey_async", "segments": [ { - "end": 61, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 56, "start": 27, "type": "SHORT" }, @@ -614,22 +614,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_async.py" }, { "canonical": true, @@ -638,30 +638,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_hcx_activation_key", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateHcxActivationKey", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreatePrivateCloud" + "shortName": "CreateHcxActivationKey" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.vmwareengine_v1.types.CreatePrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateHcxActivationKeyRequest" }, { "name": "parent", "type": "str" }, { - "name": "private_cloud", - "type": "google.cloud.vmwareengine_v1.types.PrivateCloud" + "name": "hcx_activation_key", + "type": "google.cloud.vmwareengine_v1.types.HcxActivationKey" }, { - "name": "private_cloud_id", + "name": "hcx_activation_key_id", "type": "str" }, { @@ -678,21 +678,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_private_cloud" + "shortName": "create_hcx_activation_key" }, - "description": "Sample for CreatePrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py", + "description": "Sample for CreateHcxActivationKey", + "file": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateCloud_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateHcxActivationKey_sync", "segments": [ { - "end": 61, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 56, "start": 27, "type": "SHORT" }, @@ -702,22 +702,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_hcx_activation_key_sync.py" }, { "canonical": true, @@ -727,30 +727,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_private_connection", + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_logging_server", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateLoggingServer", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreatePrivateConnection" + "shortName": "CreateLoggingServer" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateLoggingServerRequest" }, { "name": "parent", "type": "str" }, { - "name": "private_connection", - "type": "google.cloud.vmwareengine_v1.types.PrivateConnection" + "name": "logging_server", + "type": "google.cloud.vmwareengine_v1.types.LoggingServer" }, { - "name": "private_connection_id", + "name": "logging_server_id", "type": "str" }, { @@ -767,21 +767,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_private_connection" + "shortName": "create_logging_server" }, - "description": "Sample for CreatePrivateConnection", - "file": "vmwareengine_v1_generated_vmware_engine_create_private_connection_async.py", + "description": "Sample for CreateLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_create_logging_server_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateConnection_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateLoggingServer_async", "segments": [ { - "end": 62, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 63, "start": 27, "type": "SHORT" }, @@ -791,22 +791,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 
60, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_private_connection_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_logging_server_async.py" }, { "canonical": true, @@ -815,30 +815,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_private_connection", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_logging_server", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateLoggingServer", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreatePrivateConnection" + "shortName": "CreateLoggingServer" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateLoggingServerRequest" }, { "name": "parent", "type": "str" }, { - "name": "private_connection", - "type": "google.cloud.vmwareengine_v1.types.PrivateConnection" + "name": "logging_server", + "type": "google.cloud.vmwareengine_v1.types.LoggingServer" }, { - "name": "private_connection_id", + "name": "logging_server_id", "type": "str" }, { @@ -855,21 +855,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_private_connection" + "shortName": "create_logging_server" }, - "description": "Sample for CreatePrivateConnection", - "file": "vmwareengine_v1_generated_vmware_engine_create_private_connection_sync.py", + "description": "Sample for CreateLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_create_logging_server_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_CreatePrivateConnection_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateLoggingServer_sync", "segments": [ { - "end": 62, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 63, "start": 27, "type": "SHORT" }, @@ -879,22 +879,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_private_connection_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_logging_server_sync.py" }, { "canonical": true, @@ -904,30 +904,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_vmware_engine_network", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_management_dns_zone_binding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateManagementDnsZoneBinding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreateVmwareEngineNetwork" + "shortName": "CreateManagementDnsZoneBinding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateManagementDnsZoneBindingRequest" }, { "name": "parent", "type": "str" }, { - "name": "vmware_engine_network", - "type": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork" + "name": "management_dns_zone_binding", + "type": "google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding" }, { - "name": 
"vmware_engine_network_id", + "name": "management_dns_zone_binding_id", "type": "str" }, { @@ -944,13 +944,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_vmware_engine_network" + "shortName": "create_management_dns_zone_binding" }, - "description": "Sample for CreateVmwareEngineNetwork", - "file": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py", + "description": "Sample for CreateManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateVmwareEngineNetwork_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateManagementDnsZoneBinding_async", "segments": [ { "end": 60, @@ -983,7 +983,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_async.py" }, { "canonical": true, @@ -992,30 +992,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_vmware_engine_network", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_management_dns_zone_binding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateManagementDnsZoneBinding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "CreateVmwareEngineNetwork" + "shortName": "CreateManagementDnsZoneBinding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest" + "type": 
"google.cloud.vmwareengine_v1.types.CreateManagementDnsZoneBindingRequest" }, { "name": "parent", "type": "str" }, { - "name": "vmware_engine_network", - "type": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork" + "name": "management_dns_zone_binding", + "type": "google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding" }, { - "name": "vmware_engine_network_id", + "name": "management_dns_zone_binding_id", "type": "str" }, { @@ -1032,13 +1032,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_vmware_engine_network" + "shortName": "create_management_dns_zone_binding" }, - "description": "Sample for CreateVmwareEngineNetwork", - "file": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py", + "description": "Sample for CreateManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateVmwareEngineNetwork_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateManagementDnsZoneBinding_sync", "segments": [ { "end": 60, @@ -1071,7 +1071,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_sync.py" }, { "canonical": true, @@ -1081,22 +1081,30 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_cluster", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_network_peering", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPeering", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", 
"shortName": "VmwareEngine" }, - "shortName": "DeleteCluster" + "shortName": "CreateNetworkPeering" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeleteClusterRequest" + "type": "google.cloud.vmwareengine_v1.types.CreateNetworkPeeringRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "network_peering", + "type": "google.cloud.vmwareengine_v1.types.NetworkPeering" + }, + { + "name": "network_peering_id", "type": "str" }, { @@ -1113,21 +1121,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_cluster" + "shortName": "create_network_peering" }, - "description": "Sample for DeleteCluster", - "file": "vmwareengine_v1_generated_vmware_engine_delete_cluster_async.py", + "description": "Sample for CreateNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_create_network_peering_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteCluster_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateNetworkPeering_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, @@ -1137,22 +1145,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_cluster_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_create_network_peering_async.py" }, { "canonical": true, @@ -1161,9 +1169,886 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_cluster", + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineClient.create_network_peering", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPeering", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreateNetworkPeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreateNetworkPeeringRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "network_peering", + "type": "google.cloud.vmwareengine_v1.types.NetworkPeering" + }, + { + "name": "network_peering_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_network_peering" + }, + "description": "Sample for CreateNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_create_network_peering_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateNetworkPeering_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_network_peering_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_network_policy", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreateNetworkPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "network_policy", + "type": "google.cloud.vmwareengine_v1.types.NetworkPolicy" + }, + { + "name": "network_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_network_policy" + }, + "description": "Sample for CreateNetworkPolicy", + "file": "vmwareengine_v1_generated_vmware_engine_create_network_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateNetworkPolicy_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_network_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_network_policy", + "method": { + "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine.CreateNetworkPolicy", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreateNetworkPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreateNetworkPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "network_policy", + "type": "google.cloud.vmwareengine_v1.types.NetworkPolicy" + }, + { + "name": "network_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_network_policy" + }, + "description": "Sample for CreateNetworkPolicy", + "file": "vmwareengine_v1_generated_vmware_engine_create_network_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateNetworkPolicy_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_network_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_private_cloud", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud", + "service": { + "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreatePrivateCloud" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreatePrivateCloudRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_cloud", + "type": "google.cloud.vmwareengine_v1.types.PrivateCloud" + }, + { + "name": "private_cloud_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_private_cloud" + }, + "description": "Sample for CreatePrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateCloud_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_private_cloud", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateCloud", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreatePrivateCloud" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreatePrivateCloudRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_cloud", + "type": "google.cloud.vmwareengine_v1.types.PrivateCloud" + }, + { + "name": "private_cloud_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_private_cloud" + }, + "description": "Sample for CreatePrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateCloud_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_private_cloud_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_private_connection", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.vmwareengine_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": "vmwareengine_v1_generated_vmware_engine_create_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateConnection_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_private_connection", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreatePrivateConnectionRequest" + 
}, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.vmwareengine_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": "vmwareengine_v1_generated_vmware_engine_create_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreatePrivateConnection_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.create_vmware_engine_network", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreateVmwareEngineNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest" + }, + { + "name": "parent", + "type": "str" 
+ }, + { + "name": "vmware_engine_network", + "type": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork" + }, + { + "name": "vmware_engine_network_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_vmware_engine_network" + }, + "description": "Sample for CreateVmwareEngineNetwork", + "file": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateVmwareEngineNetwork_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.create_vmware_engine_network", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.CreateVmwareEngineNetwork", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "CreateVmwareEngineNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.CreateVmwareEngineNetworkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"vmware_engine_network", + "type": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork" + }, + { + "name": "vmware_engine_network_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_vmware_engine_network" + }, + "description": "Sample for CreateVmwareEngineNetwork", + "file": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_CreateVmwareEngineNetwork_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_cluster", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": "vmwareengine_v1_generated_vmware_engine_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteCluster_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_cluster", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteCluster", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" @@ -1173,10 +2058,4848 @@ "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeleteClusterRequest" + "type": "google.cloud.vmwareengine_v1.types.DeleteClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" + }, + "description": "Sample for DeleteCluster", + "file": 
"vmwareengine_v1_generated_vmware_engine_delete_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_external_access_rule", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteExternalAccessRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteExternalAccessRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_external_access_rule" + }, + "description": "Sample for DeleteExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteExternalAccessRule_async", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_external_access_rule", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteExternalAccessRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteExternalAccessRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_external_access_rule" + }, + "description": "Sample for DeleteExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteExternalAccessRule_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_external_address", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAddress", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteExternalAddress" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteExternalAddressRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_external_address" + }, + "description": "Sample for DeleteExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_delete_external_address_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteExternalAddress_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"vmwareengine_v1_generated_vmware_engine_delete_external_address_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_external_address", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAddress", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteExternalAddress" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteExternalAddressRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_external_address" + }, + "description": "Sample for DeleteExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_delete_external_address_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteExternalAddress_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_external_address_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + 
}, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_logging_server", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteLoggingServer", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteLoggingServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteLoggingServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_logging_server" + }, + "description": "Sample for DeleteLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_delete_logging_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteLoggingServer_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_logging_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_logging_server", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteLoggingServer", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + 
"shortName": "VmwareEngine" + }, + "shortName": "DeleteLoggingServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteLoggingServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_logging_server" + }, + "description": "Sample for DeleteLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_delete_logging_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteLoggingServer_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_logging_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_management_dns_zone_binding", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteManagementDnsZoneBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteManagementDnsZoneBindingRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_management_dns_zone_binding" + }, + "description": "Sample for DeleteManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteManagementDnsZoneBinding_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_management_dns_zone_binding", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteManagementDnsZoneBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteManagementDnsZoneBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_management_dns_zone_binding" + }, + "description": "Sample for DeleteManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteManagementDnsZoneBinding_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_network_peering", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPeering", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteNetworkPeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteNetworkPeeringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_network_peering" + }, + "description": 
"Sample for DeleteNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_delete_network_peering_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPeering_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_network_peering_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_network_peering", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPeering", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteNetworkPeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteNetworkPeeringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_network_peering" + }, + "description": "Sample for DeleteNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_delete_network_peering_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPeering_sync", + "segments": [ + { 
+ "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_network_peering_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_network_policy", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteNetworkPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_network_policy" + }, + "description": "Sample for DeleteNetworkPolicy", + "file": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_network_policy", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteNetworkPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_network_policy" + }, + "description": "Sample for DeleteNetworkPolicy", + "file": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_private_cloud", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeletePrivateCloud" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeletePrivateCloudRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_private_cloud" + }, + "description": "Sample for DeletePrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateCloud_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_private_cloud", + "method": { + 
"fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeletePrivateCloud" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeletePrivateCloudRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_private_cloud" + }, + "description": "Sample for DeletePrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateCloud_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_private_connection", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_private_connection", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", 
+ "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_vmware_engine_network", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteVmwareEngineNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_vmware_engine_network" + 
}, + "description": "Sample for DeleteVmwareEngineNetwork", + "file": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteVmwareEngineNetwork_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_vmware_engine_network", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "DeleteVmwareEngineNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_vmware_engine_network" + }, + "description": "Sample for DeleteVmwareEngineNetwork", + "file": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_DeleteVmwareEngineNetwork_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.fetch_network_policy_external_addresses", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "FetchNetworkPolicyExternalAddresses" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesRequest" + }, + { + "name": "network_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.FetchNetworkPolicyExternalAddressesAsyncPager", + "shortName": "fetch_network_policy_external_addresses" + }, + "description": "Sample for FetchNetworkPolicyExternalAddresses", + "file": "vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_FetchNetworkPolicyExternalAddresses_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.fetch_network_policy_external_addresses", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "FetchNetworkPolicyExternalAddresses" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.FetchNetworkPolicyExternalAddressesRequest" + }, + { + "name": "network_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.FetchNetworkPolicyExternalAddressesPager", + "shortName": "fetch_network_policy_external_addresses" + }, + "description": "Sample for FetchNetworkPolicyExternalAddresses", + "file": "vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_FetchNetworkPolicyExternalAddresses_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_cluster", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetCluster", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "vmwareengine_v1_generated_vmware_engine_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetCluster_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_cluster_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_cluster", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetCluster", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.Cluster", + "shortName": "get_cluster" + }, + "description": "Sample for GetCluster", + "file": "vmwareengine_v1_generated_vmware_engine_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_dns_bind_permission", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetDnsBindPermission", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetDnsBindPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetDnsBindPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.DnsBindPermission", + "shortName": "get_dns_bind_permission" + }, + "description": "Sample for GetDnsBindPermission", + "file": "vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetDnsBindPermission_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_dns_bind_permission", + "method": { + "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine.GetDnsBindPermission", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetDnsBindPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetDnsBindPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.DnsBindPermission", + "shortName": "get_dns_bind_permission" + }, + "description": "Sample for GetDnsBindPermission", + "file": "vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetDnsBindPermission_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_dns_forwarding", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetDnsForwarding", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetDnsForwarding" + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetDnsForwardingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.DnsForwarding", + "shortName": "get_dns_forwarding" + }, + "description": "Sample for GetDnsForwarding", + "file": "vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetDnsForwarding_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_dns_forwarding", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetDnsForwarding", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetDnsForwarding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetDnsForwardingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.DnsForwarding", + "shortName": "get_dns_forwarding" + }, + "description": "Sample for GetDnsForwarding", + "file": "vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetDnsForwarding_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_external_access_rule", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAccessRule", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetExternalAccessRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetExternalAccessRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.ExternalAccessRule", + "shortName": "get_external_access_rule" + }, + "description": "Sample for GetExternalAccessRule", + "file": 
"vmwareengine_v1_generated_vmware_engine_get_external_access_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetExternalAccessRule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_external_access_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_external_access_rule", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAccessRule", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetExternalAccessRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetExternalAccessRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.ExternalAccessRule", + "shortName": "get_external_access_rule" + }, + "description": "Sample for GetExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_get_external_access_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetExternalAccessRule_sync", + "segments": [ + { + "end": 
51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_external_access_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_external_address", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAddress", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetExternalAddress" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetExternalAddressRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.ExternalAddress", + "shortName": "get_external_address" + }, + "description": "Sample for GetExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_get_external_address_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetExternalAddress_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { 
+ "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_external_address_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_external_address", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetExternalAddress", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetExternalAddress" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetExternalAddressRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.ExternalAddress", + "shortName": "get_external_address" + }, + "description": "Sample for GetExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_get_external_address_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetExternalAddress_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_external_address_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": 
true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_hcx_activation_key", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetHcxActivationKey", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetHcxActivationKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.HcxActivationKey", + "shortName": "get_hcx_activation_key" + }, + "description": "Sample for GetHcxActivationKey", + "file": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetHcxActivationKey_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_hcx_activation_key", + "method": { + "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine.GetHcxActivationKey", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetHcxActivationKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.HcxActivationKey", + "shortName": "get_hcx_activation_key" + }, + "description": "Sample for GetHcxActivationKey", + "file": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetHcxActivationKey_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_logging_server", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetLoggingServer", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetLoggingServer" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetLoggingServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.LoggingServer", + "shortName": "get_logging_server" + }, + "description": "Sample for GetLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_get_logging_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetLoggingServer_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_logging_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_logging_server", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetLoggingServer", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetLoggingServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetLoggingServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.LoggingServer", + "shortName": "get_logging_server" + }, + "description": "Sample for GetLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_get_logging_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetLoggingServer_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_logging_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_management_dns_zone_binding", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetManagementDnsZoneBinding", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetManagementDnsZoneBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetManagementDnsZoneBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding", + "shortName": "get_management_dns_zone_binding" + }, + "description": "Sample for 
GetManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetManagementDnsZoneBinding_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_management_dns_zone_binding", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetManagementDnsZoneBinding", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetManagementDnsZoneBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetManagementDnsZoneBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding", + "shortName": "get_management_dns_zone_binding" + }, + "description": "Sample for GetManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "vmwareengine_v1_generated_VmwareEngine_GetManagementDnsZoneBinding_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_network_peering", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPeering", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNetworkPeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNetworkPeeringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.NetworkPeering", + "shortName": "get_network_peering" + }, + "description": "Sample for GetNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_get_network_peering_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNetworkPeering_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, 
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_network_peering_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_network_peering", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPeering", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNetworkPeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNetworkPeeringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.NetworkPeering", + "shortName": "get_network_peering" + }, + "description": "Sample for GetNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_get_network_peering_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNetworkPeering_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"vmwareengine_v1_generated_vmware_engine_get_network_peering_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_network_policy", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNetworkPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.NetworkPolicy", + "shortName": "get_network_policy" + }, + "description": "Sample for GetNetworkPolicy", + "file": "vmwareengine_v1_generated_vmware_engine_get_network_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNetworkPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_network_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineClient.get_network_policy", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNetworkPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.NetworkPolicy", + "shortName": "get_network_policy" + }, + "description": "Sample for GetNetworkPolicy", + "file": "vmwareengine_v1_generated_vmware_engine_get_network_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNetworkPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_network_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_node_type", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + 
"shortName": "GetNodeType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNodeTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.NodeType", + "shortName": "get_node_type" + }, + "description": "Sample for GetNodeType", + "file": "vmwareengine_v1_generated_vmware_engine_get_node_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNodeType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_node_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_node_type", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNodeType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNodeTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.NodeType", + "shortName": "get_node_type" + }, + "description": "Sample for GetNodeType", + "file": "vmwareengine_v1_generated_vmware_engine_get_node_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNodeType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_node_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_node", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNode", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNode" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNodeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.Node", + "shortName": "get_node" + }, + "description": "Sample for GetNode", + "file": "vmwareengine_v1_generated_vmware_engine_get_node_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_GetNode_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_node_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_node", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNode", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetNode" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetNodeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.Node", + "shortName": "get_node" + }, + "description": "Sample for GetNode", + "file": "vmwareengine_v1_generated_vmware_engine_get_node_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNode_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { 
+ "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_node_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_private_cloud", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetPrivateCloud" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetPrivateCloudRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.PrivateCloud", + "shortName": "get_private_cloud" + }, + "description": "Sample for GetPrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateCloud_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + 
"shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_private_cloud", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetPrivateCloud" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetPrivateCloudRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.PrivateCloud", + "shortName": "get_private_cloud" + }, + "description": "Sample for GetPrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateCloud_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_private_connection", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection", + "service": { + "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "vmwareengine_v1_generated_vmware_engine_get_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_private_connection", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "vmwareengine_v1_generated_vmware_engine_get_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_subnet", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetSubnet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetSubnetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.vmwareengine_v1.types.Subnet", + "shortName": "get_subnet" + }, + "description": "Sample for GetSubnet", + "file": "vmwareengine_v1_generated_vmware_engine_get_subnet_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetSubnet_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_subnet_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_subnet", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetSubnet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetSubnetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.Subnet", + "shortName": "get_subnet" + }, + "description": "Sample for GetSubnet", + "file": "vmwareengine_v1_generated_vmware_engine_get_subnet_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetSubnet_sync", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_subnet_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_vmware_engine_network", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetVmwareEngineNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork", + "shortName": "get_vmware_engine_network" + }, + "description": "Sample for GetVmwareEngineNetwork", + "file": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetVmwareEngineNetwork_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_vmware_engine_network", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GetVmwareEngineNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork", + "shortName": "get_vmware_engine_network" + }, + "description": "Sample for GetVmwareEngineNetwork", + "file": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetVmwareEngineNetwork_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.grant_dns_bind_permission", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GrantDnsBindPermission", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GrantDnsBindPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GrantDnsBindPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "principal", + "type": "google.cloud.vmwareengine_v1.types.Principal" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "grant_dns_bind_permission" + }, + "description": "Sample for GrantDnsBindPermission", + "file": "vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GrantDnsBindPermission_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + 
"shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.grant_dns_bind_permission", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GrantDnsBindPermission", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "GrantDnsBindPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.GrantDnsBindPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "principal", + "type": "google.cloud.vmwareengine_v1.types.Principal" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "grant_dns_bind_permission" + }, + "description": "Sample for GrantDnsBindPermission", + "file": "vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_GrantDnsBindPermission_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_clusters", + "method": { + 
"fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListClusters", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "vmwareengine_v1_generated_vmware_engine_list_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListClusters_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_list_clusters_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_clusters", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListClusters", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "ListClusters" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.vmwareengine_v1.types.ListClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListClustersPager", + "shortName": "list_clusters" + }, + "description": "Sample for ListClusters", + "file": "vmwareengine_v1_generated_vmware_engine_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_list_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_external_access_rules", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "ListExternalAccessRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.ListExternalAccessRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAccessRulesAsyncPager", + "shortName": "list_external_access_rules" + }, + "description": "Sample for ListExternalAccessRules", + "file": "vmwareengine_v1_generated_vmware_engine_list_external_access_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListExternalAccessRules_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_list_external_access_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_external_access_rules", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAccessRules", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "ListExternalAccessRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.ListExternalAccessRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAccessRulesPager", + 
"shortName": "list_external_access_rules" + }, + "description": "Sample for ListExternalAccessRules", + "file": "vmwareengine_v1_generated_vmware_engine_list_external_access_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListExternalAccessRules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_list_external_access_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", + "shortName": "VmwareEngineAsyncClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_external_addresses", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "ListExternalAddresses" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.ListExternalAddressesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAddressesAsyncPager", + "shortName": "list_external_addresses" + }, + "description": "Sample for ListExternalAddresses", + "file": 
"vmwareengine_v1_generated_vmware_engine_list_external_addresses_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListExternalAddresses_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "vmwareengine_v1_generated_vmware_engine_list_external_addresses_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", + "shortName": "VmwareEngineClient" + }, + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_external_addresses", + "method": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListExternalAddresses", + "service": { + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", + "shortName": "VmwareEngine" + }, + "shortName": "ListExternalAddresses" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.vmwareengine_v1.types.ListExternalAddressesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1192,22 +6915,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_cluster" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListExternalAddressesPager", + "shortName": "list_external_addresses" }, - "description": "Sample for DeleteCluster", - "file": "vmwareengine_v1_generated_vmware_engine_delete_cluster_sync.py", + "description": "Sample for ListExternalAddresses", + "file": "vmwareengine_v1_generated_vmware_engine_list_external_addresses_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteCluster_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListExternalAddresses_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1222,17 +6945,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_cluster_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_external_addresses_sync.py" }, { "canonical": true, @@ -1242,22 +6965,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_network_policy", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_hcx_activation_keys", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeleteNetworkPolicy" + "shortName": "ListHcxActivationKeys" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest" + "type": "google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1273,22 +6996,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_network_policy" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysAsyncPager", + "shortName": "list_hcx_activation_keys" }, - "description": "Sample for 
DeleteNetworkPolicy", - "file": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_async.py", + "description": "Sample for ListHcxActivationKeys", + "file": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPolicy_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListHcxActivationKeys_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1303,17 +7026,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_async.py" }, { "canonical": true, @@ -1322,22 +7045,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_network_policy", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_hcx_activation_keys", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteNetworkPolicy", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeleteNetworkPolicy" + "shortName": "ListHcxActivationKeys" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeleteNetworkPolicyRequest" + "type": "google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1353,22 +7076,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.api_core.operation.Operation", - "shortName": "delete_network_policy" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysPager", + "shortName": "list_hcx_activation_keys" }, - "description": "Sample for DeleteNetworkPolicy", - "file": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_sync.py", + "description": "Sample for ListHcxActivationKeys", + "file": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPolicy_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListHcxActivationKeys_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1383,17 +7106,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_network_policy_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_sync.py" }, { "canonical": true, @@ -1403,22 +7126,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_logging_servers", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeletePrivateCloud" + "shortName": "ListLoggingServers" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.vmwareengine_v1.types.DeletePrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.ListLoggingServersRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1434,22 +7157,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_private_cloud" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListLoggingServersAsyncPager", + "shortName": "list_logging_servers" }, - "description": "Sample for DeletePrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_async.py", + "description": "Sample for ListLoggingServers", + "file": "vmwareengine_v1_generated_vmware_engine_list_logging_servers_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateCloud_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListLoggingServers_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1464,17 +7187,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_logging_servers_async.py" }, { "canonical": true, @@ -1483,22 +7206,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_logging_servers", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateCloud", + "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine.ListLoggingServers", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeletePrivateCloud" + "shortName": "ListLoggingServers" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeletePrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.ListLoggingServersRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1514,22 +7237,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_private_cloud" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListLoggingServersPager", + "shortName": "list_logging_servers" }, - "description": "Sample for DeletePrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_sync.py", + "description": "Sample for ListLoggingServers", + "file": "vmwareengine_v1_generated_vmware_engine_list_logging_servers_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateCloud_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListLoggingServers_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1544,17 +7267,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_private_cloud_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_logging_servers_sync.py" }, { "canonical": true, @@ -1564,22 +7287,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_private_connection", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_management_dns_zone_bindings", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeletePrivateConnection" + "shortName": "ListManagementDnsZoneBindings" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest" + "type": "google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1595,22 +7318,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_private_connection" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListManagementDnsZoneBindingsAsyncPager", + "shortName": "list_management_dns_zone_bindings" }, - "description": "Sample for DeletePrivateConnection", - "file": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_async.py", + "description": "Sample for ListManagementDnsZoneBindings", + "file": "vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateConnection_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListManagementDnsZoneBindings_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1625,17 +7348,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + 
"end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_async.py" }, { "canonical": true, @@ -1644,22 +7367,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_private_connection", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_management_dns_zone_bindings", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeletePrivateConnection", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListManagementDnsZoneBindings", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeletePrivateConnection" + "shortName": "ListManagementDnsZoneBindings" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeletePrivateConnectionRequest" + "type": "google.cloud.vmwareengine_v1.types.ListManagementDnsZoneBindingsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1675,22 +7398,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_private_connection" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListManagementDnsZoneBindingsPager", + "shortName": "list_management_dns_zone_bindings" }, - "description": "Sample for DeletePrivateConnection", - "file": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_sync.py", + "description": "Sample for ListManagementDnsZoneBindings", + "file": "vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeletePrivateConnection_sync", + "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_ListManagementDnsZoneBindings_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1705,17 +7428,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_private_connection_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_sync.py" }, { "canonical": true, @@ -1725,22 +7448,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.delete_vmware_engine_network", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_network_peerings", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeleteVmwareEngineNetwork" + "shortName": "ListNetworkPeerings" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNetworkPeeringsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1756,22 +7479,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_vmware_engine_network" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPeeringsAsyncPager", + "shortName": "list_network_peerings" }, - "description": "Sample for DeleteVmwareEngineNetwork", - "file": 
"vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_async.py", + "description": "Sample for ListNetworkPeerings", + "file": "vmwareengine_v1_generated_vmware_engine_list_network_peerings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteVmwareEngineNetwork_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNetworkPeerings_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1786,17 +7509,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_network_peerings_async.py" }, { "canonical": true, @@ -1805,22 +7528,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.delete_vmware_engine_network", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_network_peerings", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.DeleteVmwareEngineNetwork", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPeerings", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "DeleteVmwareEngineNetwork" + "shortName": "ListNetworkPeerings" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.DeleteVmwareEngineNetworkRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNetworkPeeringsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1836,22 +7559,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.api_core.operation.Operation", - "shortName": "delete_vmware_engine_network" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPeeringsPager", + "shortName": "list_network_peerings" }, - "description": "Sample for DeleteVmwareEngineNetwork", - "file": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_sync.py", + "description": "Sample for ListNetworkPeerings", + "file": "vmwareengine_v1_generated_vmware_engine_list_network_peerings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_DeleteVmwareEngineNetwork_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNetworkPeerings_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1866,17 +7589,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_delete_vmware_engine_network_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_network_peerings_sync.py" }, { "canonical": true, @@ -1886,22 +7609,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_cluster", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_network_policies", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetCluster", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetCluster" + "shortName": "ListNetworkPolicies" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.vmwareengine_v1.types.GetClusterRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1917,22 +7640,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Cluster", - "shortName": "get_cluster" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesAsyncPager", + "shortName": "list_network_policies" }, - "description": "Sample for GetCluster", - "file": "vmwareengine_v1_generated_vmware_engine_get_cluster_async.py", + "description": "Sample for ListNetworkPolicies", + "file": "vmwareengine_v1_generated_vmware_engine_list_network_policies_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetCluster_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNetworkPolicies_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1952,12 +7675,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_cluster_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_network_policies_async.py" }, { "canonical": true, @@ -1966,22 +7689,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_cluster", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_network_policies", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetCluster", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetCluster" + "shortName": 
"ListNetworkPolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetClusterRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1997,22 +7720,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Cluster", - "shortName": "get_cluster" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesPager", + "shortName": "list_network_policies" }, - "description": "Sample for GetCluster", - "file": "vmwareengine_v1_generated_vmware_engine_get_cluster_sync.py", + "description": "Sample for ListNetworkPolicies", + "file": "vmwareengine_v1_generated_vmware_engine_list_network_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetCluster_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNetworkPolicies_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2032,12 +7755,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_cluster_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_network_policies_sync.py" }, { "canonical": true, @@ -2047,22 +7770,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_hcx_activation_key", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_node_types", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetHcxActivationKey", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes", "service": { "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetHcxActivationKey" + "shortName": "ListNodeTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNodeTypesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2078,22 +7801,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.HcxActivationKey", - "shortName": "get_hcx_activation_key" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesAsyncPager", + "shortName": "list_node_types" }, - "description": "Sample for GetHcxActivationKey", - "file": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_async.py", + "description": "Sample for ListNodeTypes", + "file": "vmwareengine_v1_generated_vmware_engine_list_node_types_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetHcxActivationKey_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNodeTypes_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2113,12 +7836,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_node_types_async.py" }, { "canonical": true, @@ -2127,22 +7850,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_hcx_activation_key", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_node_types", "method": { - "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine.GetHcxActivationKey", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetHcxActivationKey" + "shortName": "ListNodeTypes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetHcxActivationKeyRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNodeTypesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2158,22 +7881,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.HcxActivationKey", - "shortName": "get_hcx_activation_key" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesPager", + "shortName": "list_node_types" }, - "description": "Sample for GetHcxActivationKey", - "file": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_sync.py", + "description": "Sample for ListNodeTypes", + "file": "vmwareengine_v1_generated_vmware_engine_list_node_types_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetHcxActivationKey_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNodeTypes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2193,12 +7916,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_hcx_activation_key_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_node_types_sync.py" }, { "canonical": true, @@ -2208,22 +7931,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_network_policy", + 
"fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_nodes", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNodes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetNetworkPolicy" + "shortName": "ListNodes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNodesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2239,22 +7962,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.NetworkPolicy", - "shortName": "get_network_policy" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodesAsyncPager", + "shortName": "list_nodes" }, - "description": "Sample for GetNetworkPolicy", - "file": "vmwareengine_v1_generated_vmware_engine_get_network_policy_async.py", + "description": "Sample for ListNodes", + "file": "vmwareengine_v1_generated_vmware_engine_list_nodes_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNetworkPolicy_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNodes_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2274,12 +7997,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_network_policy_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_nodes_async.py" }, { "canonical": true, @@ -2288,22 +8011,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineClient.get_network_policy", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_nodes", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNetworkPolicy", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNodes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetNetworkPolicy" + "shortName": "ListNodes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetNetworkPolicyRequest" + "type": "google.cloud.vmwareengine_v1.types.ListNodesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2319,22 +8042,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.NetworkPolicy", - "shortName": "get_network_policy" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodesPager", + "shortName": "list_nodes" }, - "description": "Sample for GetNetworkPolicy", - "file": "vmwareengine_v1_generated_vmware_engine_get_network_policy_sync.py", + "description": "Sample for ListNodes", + "file": "vmwareengine_v1_generated_vmware_engine_list_nodes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNetworkPolicy_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNodes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2354,12 +8077,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_network_policy_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_nodes_sync.py" }, { "canonical": true, @@ -2369,22 +8092,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": 
"VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_node_type", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_peering_routes", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetNodeType" + "shortName": "ListPeeringRoutes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetNodeTypeRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPeeringRoutesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2400,22 +8123,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.NodeType", - "shortName": "get_node_type" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPeeringRoutesAsyncPager", + "shortName": "list_peering_routes" }, - "description": "Sample for GetNodeType", - "file": "vmwareengine_v1_generated_vmware_engine_get_node_type_async.py", + "description": "Sample for ListPeeringRoutes", + "file": "vmwareengine_v1_generated_vmware_engine_list_peering_routes_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNodeType_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPeeringRoutes_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2435,12 +8158,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_node_type_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_peering_routes_async.py" }, { "canonical": true, @@ -2449,22 +8172,22 @@ 
"fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_node_type", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_peering_routes", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetNodeType", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPeeringRoutes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetNodeType" + "shortName": "ListPeeringRoutes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetNodeTypeRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPeeringRoutesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2480,22 +8203,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.NodeType", - "shortName": "get_node_type" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPeeringRoutesPager", + "shortName": "list_peering_routes" }, - "description": "Sample for GetNodeType", - "file": "vmwareengine_v1_generated_vmware_engine_get_node_type_sync.py", + "description": "Sample for ListPeeringRoutes", + "file": "vmwareengine_v1_generated_vmware_engine_list_peering_routes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetNodeType_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPeeringRoutes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2515,12 +8238,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_node_type_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_peering_routes_sync.py" 
}, { "canonical": true, @@ -2530,22 +8253,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_private_clouds", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetPrivateCloud" + "shortName": "ListPrivateClouds" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetPrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPrivateCloudsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2561,22 +8284,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.PrivateCloud", - "shortName": "get_private_cloud" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateCloudsAsyncPager", + "shortName": "list_private_clouds" }, - "description": "Sample for GetPrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_async.py", + "description": "Sample for ListPrivateClouds", + "file": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateCloud_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateClouds_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2596,12 +8319,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"vmwareengine_v1_generated_vmware_engine_get_private_cloud_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_async.py" }, { "canonical": true, @@ -2610,22 +8333,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_private_clouds", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetPrivateCloud" + "shortName": "ListPrivateClouds" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetPrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPrivateCloudsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2641,22 +8364,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.PrivateCloud", - "shortName": "get_private_cloud" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateCloudsPager", + "shortName": "list_private_clouds" }, - "description": "Sample for GetPrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_sync.py", + "description": "Sample for ListPrivateClouds", + "file": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateCloud_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateClouds_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2676,12 +8399,12 @@ "type": "REQUEST_EXECUTION" 
}, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_private_cloud_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_sync.py" }, { "canonical": true, @@ -2691,22 +8414,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_private_connection", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_private_connection_peering_routes", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetPrivateConnection" + "shortName": "ListPrivateConnectionPeeringRoutes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2722,22 +8445,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.PrivateConnection", - "shortName": "get_private_connection" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesAsyncPager", + "shortName": "list_private_connection_peering_routes" }, - "description": "Sample for GetPrivateConnection", - "file": "vmwareengine_v1_generated_vmware_engine_get_private_connection_async.py", + "description": "Sample for ListPrivateConnectionPeeringRoutes", + "file": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_GetPrivateConnection_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnectionPeeringRoutes_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2757,12 +8480,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_private_connection_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_async.py" }, { "canonical": true, @@ -2771,22 +8494,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_private_connection", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_private_connection_peering_routes", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetPrivateConnection", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetPrivateConnection" + "shortName": "ListPrivateConnectionPeeringRoutes" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetPrivateConnectionRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2802,22 +8525,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.PrivateConnection", - "shortName": "get_private_connection" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesPager", + "shortName": "list_private_connection_peering_routes" }, - "description": "Sample for 
GetPrivateConnection", - "file": "vmwareengine_v1_generated_vmware_engine_get_private_connection_sync.py", + "description": "Sample for ListPrivateConnectionPeeringRoutes", + "file": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetPrivateConnection_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnectionPeeringRoutes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2837,12 +8560,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_private_connection_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_sync.py" }, { "canonical": true, @@ -2852,22 +8575,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_subnet", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_private_connections", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetSubnet" + "shortName": "ListPrivateConnections" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetSubnetRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2883,22 +8606,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Subnet", - "shortName": 
"get_subnet" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsAsyncPager", + "shortName": "list_private_connections" }, - "description": "Sample for GetSubnet", - "file": "vmwareengine_v1_generated_vmware_engine_get_subnet_async.py", + "description": "Sample for ListPrivateConnections", + "file": "vmwareengine_v1_generated_vmware_engine_list_private_connections_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetSubnet_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnections_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2918,12 +8641,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_subnet_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_private_connections_async.py" }, { "canonical": true, @@ -2932,22 +8655,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_subnet", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_private_connections", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetSubnet", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetSubnet" + "shortName": "ListPrivateConnections" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetSubnetRequest" + "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2963,22 +8686,22 @@ "type": "Sequence[Tuple[str, 
str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Subnet", - "shortName": "get_subnet" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsPager", + "shortName": "list_private_connections" }, - "description": "Sample for GetSubnet", - "file": "vmwareengine_v1_generated_vmware_engine_get_subnet_sync.py", + "description": "Sample for ListPrivateConnections", + "file": "vmwareengine_v1_generated_vmware_engine_list_private_connections_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetSubnet_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnections_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2998,12 +8721,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_subnet_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_private_connections_sync.py" }, { "canonical": true, @@ -3013,22 +8736,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.get_vmware_engine_network", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_subnets", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetVmwareEngineNetwork" + "shortName": "ListSubnets" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest" + "type": "google.cloud.vmwareengine_v1.types.ListSubnetsRequest" }, { - 
"name": "name", + "name": "parent", "type": "str" }, { @@ -3044,22 +8767,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork", - "shortName": "get_vmware_engine_network" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsAsyncPager", + "shortName": "list_subnets" }, - "description": "Sample for GetVmwareEngineNetwork", - "file": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_async.py", + "description": "Sample for ListSubnets", + "file": "vmwareengine_v1_generated_vmware_engine_list_subnets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetVmwareEngineNetwork_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListSubnets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3079,12 +8802,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_subnets_async.py" }, { "canonical": true, @@ -3093,22 +8816,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.get_vmware_engine_network", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_subnets", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.GetVmwareEngineNetwork", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "GetVmwareEngineNetwork" + "shortName": "ListSubnets" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.vmwareengine_v1.types.GetVmwareEngineNetworkRequest" + "type": "google.cloud.vmwareengine_v1.types.ListSubnetsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3124,22 +8847,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.VmwareEngineNetwork", - "shortName": "get_vmware_engine_network" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsPager", + "shortName": "list_subnets" }, - "description": "Sample for GetVmwareEngineNetwork", - "file": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_sync.py", + "description": "Sample for ListSubnets", + "file": "vmwareengine_v1_generated_vmware_engine_list_subnets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_GetVmwareEngineNetwork_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListSubnets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3159,12 +8882,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_get_vmware_engine_network_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_subnets_sync.py" }, { "canonical": true, @@ -3174,19 +8897,19 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_clusters", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_vmware_engine_networks", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListClusters", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" 
}, - "shortName": "ListClusters" + "shortName": "ListVmwareEngineNetworks" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListClustersRequest" + "type": "google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest" }, { "name": "parent", @@ -3205,14 +8928,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListClustersAsyncPager", - "shortName": "list_clusters" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksAsyncPager", + "shortName": "list_vmware_engine_networks" }, - "description": "Sample for ListClusters", - "file": "vmwareengine_v1_generated_vmware_engine_list_clusters_async.py", + "description": "Sample for ListVmwareEngineNetworks", + "file": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListClusters_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListVmwareEngineNetworks_async", "segments": [ { "end": 52, @@ -3245,7 +8968,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_clusters_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_async.py" }, { "canonical": true, @@ -3254,19 +8977,19 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_clusters", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_vmware_engine_networks", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListClusters", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListClusters" + 
"shortName": "ListVmwareEngineNetworks" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListClustersRequest" + "type": "google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest" }, { "name": "parent", @@ -3285,14 +9008,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListClustersPager", - "shortName": "list_clusters" + "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksPager", + "shortName": "list_vmware_engine_networks" }, - "description": "Sample for ListClusters", - "file": "vmwareengine_v1_generated_vmware_engine_list_clusters_sync.py", + "description": "Sample for ListVmwareEngineNetworks", + "file": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListClusters_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListVmwareEngineNetworks_sync", "segments": [ { "end": 52, @@ -3325,7 +9048,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_clusters_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_sync.py" }, { "canonical": true, @@ -3335,22 +9058,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_hcx_activation_keys", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.repair_management_dns_zone_binding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.RepairManagementDnsZoneBinding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListHcxActivationKeys" 
+ "shortName": "RepairManagementDnsZoneBinding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest" + "type": "google.cloud.vmwareengine_v1.types.RepairManagementDnsZoneBindingRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3366,22 +9089,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysAsyncPager", - "shortName": "list_hcx_activation_keys" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "repair_management_dns_zone_binding" }, - "description": "Sample for ListHcxActivationKeys", - "file": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_async.py", + "description": "Sample for RepairManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListHcxActivationKeys_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_RepairManagementDnsZoneBinding_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3396,17 +9119,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_async.py" }, { "canonical": true, @@ -3415,22 +9138,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_hcx_activation_keys", + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineClient.repair_management_dns_zone_binding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListHcxActivationKeys", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.RepairManagementDnsZoneBinding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListHcxActivationKeys" + "shortName": "RepairManagementDnsZoneBinding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListHcxActivationKeysRequest" + "type": "google.cloud.vmwareengine_v1.types.RepairManagementDnsZoneBindingRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3446,22 +9169,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListHcxActivationKeysPager", - "shortName": "list_hcx_activation_keys" + "resultType": "google.api_core.operation.Operation", + "shortName": "repair_management_dns_zone_binding" }, - "description": "Sample for ListHcxActivationKeys", - "file": "vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_sync.py", + "description": "Sample for RepairManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListHcxActivationKeys_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_RepairManagementDnsZoneBinding_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3476,17 +9199,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": 
"vmwareengine_v1_generated_vmware_engine_list_hcx_activation_keys_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_sync.py" }, { "canonical": true, @@ -3496,22 +9219,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_network_policies", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.reset_nsx_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListNetworkPolicies" + "shortName": "ResetNsxCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest" + "type": "google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -3527,22 +9250,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesAsyncPager", - "shortName": "list_network_policies" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reset_nsx_credentials" }, - "description": "Sample for ListNetworkPolicies", - "file": "vmwareengine_v1_generated_vmware_engine_list_network_policies_async.py", + "description": "Sample for ResetNsxCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNetworkPolicies_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetNsxCredentials_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - 
"end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3557,17 +9280,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_network_policies_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_async.py" }, { "canonical": true, @@ -3576,22 +9299,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_network_policies", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.reset_nsx_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNetworkPolicies", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListNetworkPolicies" + "shortName": "ResetNsxCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListNetworkPoliciesRequest" + "type": "google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -3607,22 +9330,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNetworkPoliciesPager", - "shortName": "list_network_policies" + "resultType": "google.api_core.operation.Operation", + "shortName": "reset_nsx_credentials" }, - "description": "Sample for ListNetworkPolicies", - "file": "vmwareengine_v1_generated_vmware_engine_list_network_policies_sync.py", + "description": "Sample for ResetNsxCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNetworkPolicies_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetNsxCredentials_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3637,17 +9360,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_network_policies_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_sync.py" }, { "canonical": true, @@ -3657,22 +9380,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_node_types", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.reset_vcenter_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListNodeTypes" + "shortName": "ResetVcenterCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListNodeTypesRequest" + "type": "google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -3688,22 +9411,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesAsyncPager", - "shortName": "list_node_types" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reset_vcenter_credentials" }, - "description": "Sample for ListNodeTypes", - "file": 
"vmwareengine_v1_generated_vmware_engine_list_node_types_async.py", + "description": "Sample for ResetVcenterCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNodeTypes_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetVcenterCredentials_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3718,17 +9441,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_node_types_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_async.py" }, { "canonical": true, @@ -3737,22 +9460,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_node_types", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.reset_vcenter_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListNodeTypes", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListNodeTypes" + "shortName": "ResetVcenterCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListNodeTypesRequest" + "type": "google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -3768,22 +9491,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListNodeTypesPager", - "shortName": "list_node_types" + "resultType": "google.api_core.operation.Operation", + "shortName": "reset_vcenter_credentials" }, - "description": "Sample for ListNodeTypes", - "file": "vmwareengine_v1_generated_vmware_engine_list_node_types_sync.py", + "description": "Sample for ResetVcenterCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListNodeTypes_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetVcenterCredentials_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3798,17 +9521,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_node_types_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_sync.py" }, { "canonical": true, @@ -3818,24 +9541,28 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_private_clouds", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.revoke_dns_bind_permission", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.RevokeDnsBindPermission", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListPrivateClouds" + "shortName": "RevokeDnsBindPermission" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.vmwareengine_v1.types.ListPrivateCloudsRequest" + "type": "google.cloud.vmwareengine_v1.types.RevokeDnsBindPermissionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, + { + "name": "principal", + "type": "google.cloud.vmwareengine_v1.types.Principal" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3849,22 +9576,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateCloudsAsyncPager", - "shortName": "list_private_clouds" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "revoke_dns_bind_permission" }, - "description": "Sample for ListPrivateClouds", - "file": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_async.py", + "description": "Sample for RevokeDnsBindPermission", + "file": "vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateClouds_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_RevokeDnsBindPermission_async", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3874,22 +9601,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_async.py" }, { "canonical": true, @@ -3898,24 +9625,28 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineClient.list_private_clouds", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.revoke_dns_bind_permission", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateClouds", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.RevokeDnsBindPermission", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListPrivateClouds" + "shortName": "RevokeDnsBindPermission" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListPrivateCloudsRequest" + "type": "google.cloud.vmwareengine_v1.types.RevokeDnsBindPermissionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, + { + "name": "principal", + "type": "google.cloud.vmwareengine_v1.types.Principal" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3929,22 +9660,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateCloudsPager", - "shortName": "list_private_clouds" + "resultType": "google.api_core.operation.Operation", + "shortName": "revoke_dns_bind_permission" }, - "description": "Sample for ListPrivateClouds", - "file": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_sync.py", + "description": "Sample for RevokeDnsBindPermission", + "file": "vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateClouds_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_RevokeDnsBindPermission_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -3954,22 +9685,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, 
+ "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_private_clouds_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_sync.py" }, { "canonical": true, @@ -3979,22 +9710,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_private_connection_peering_routes", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.show_nsx_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListPrivateConnectionPeeringRoutes" + "shortName": "ShowNsxCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest" + "type": "google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -4010,22 +9741,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesAsyncPager", - "shortName": "list_private_connection_peering_routes" + "resultType": "google.cloud.vmwareengine_v1.types.Credentials", + "shortName": "show_nsx_credentials" }, - "description": "Sample for ListPrivateConnectionPeeringRoutes", - "file": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_async.py", + "description": "Sample for ShowNsxCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_async.py", "language": "PYTHON", 
"origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnectionPeeringRoutes_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowNsxCredentials_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4045,12 +9776,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_async.py" }, { "canonical": true, @@ -4059,22 +9790,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_private_connection_peering_routes", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.show_nsx_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnectionPeeringRoutes", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListPrivateConnectionPeeringRoutes" + "shortName": "ShowNsxCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionPeeringRoutesRequest" + "type": "google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -4090,22 +9821,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionPeeringRoutesPager", - "shortName": "list_private_connection_peering_routes" + "resultType": "google.cloud.vmwareengine_v1.types.Credentials", + "shortName": "show_nsx_credentials" }, - "description": 
"Sample for ListPrivateConnectionPeeringRoutes", - "file": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_sync.py", + "description": "Sample for ShowNsxCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnectionPeeringRoutes_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowNsxCredentials_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4125,12 +9856,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_private_connection_peering_routes_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_sync.py" }, { "canonical": true, @@ -4140,22 +9871,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_private_connections", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.show_vcenter_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListPrivateConnections" + "shortName": "ShowVcenterCredentials" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest" + "type": "google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -4171,22 +9902,22 @@ "type": "Sequence[Tuple[str, str]" } ], - 
"resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsAsyncPager", - "shortName": "list_private_connections" + "resultType": "google.cloud.vmwareengine_v1.types.Credentials", + "shortName": "show_vcenter_credentials" }, - "description": "Sample for ListPrivateConnections", - "file": "vmwareengine_v1_generated_vmware_engine_list_private_connections_async.py", + "description": "Sample for ShowVcenterCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnections_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowVcenterCredentials_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4206,12 +9937,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_private_connections_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_async.py" }, { "canonical": true, @@ -4220,22 +9951,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_private_connections", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.show_vcenter_credentials", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListPrivateConnections", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListPrivateConnections" + "shortName": "ShowVcenterCredentials" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.vmwareengine_v1.types.ListPrivateConnectionsRequest" + "type": "google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest" }, { - "name": "parent", + "name": "private_cloud", "type": "str" }, { @@ -4251,22 +9982,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListPrivateConnectionsPager", - "shortName": "list_private_connections" + "resultType": "google.cloud.vmwareengine_v1.types.Credentials", + "shortName": "show_vcenter_credentials" }, - "description": "Sample for ListPrivateConnections", - "file": "vmwareengine_v1_generated_vmware_engine_list_private_connections_sync.py", + "description": "Sample for ShowVcenterCredentials", + "file": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListPrivateConnections_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowVcenterCredentials_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4286,12 +10017,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_private_connections_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_sync.py" }, { "canonical": true, @@ -4301,22 +10032,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_subnets", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.undelete_private_cloud", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud", "service": { "fullName": 
"google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListSubnets" + "shortName": "UndeletePrivateCloud" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListSubnetsRequest" + "type": "google.cloud.vmwareengine_v1.types.UndeletePrivateCloudRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -4332,22 +10063,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsAsyncPager", - "shortName": "list_subnets" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "undelete_private_cloud" }, - "description": "Sample for ListSubnets", - "file": "vmwareengine_v1_generated_vmware_engine_list_subnets_async.py", + "description": "Sample for UndeletePrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListSubnets_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UndeletePrivateCloud_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4362,17 +10093,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_subnets_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_async.py" }, { "canonical": true, @@ -4381,22 +10112,22 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_subnets", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.undelete_private_cloud", "method": { - 
"fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListSubnets", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListSubnets" + "shortName": "UndeletePrivateCloud" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListSubnetsRequest" + "type": "google.cloud.vmwareengine_v1.types.UndeletePrivateCloudRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -4412,22 +10143,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListSubnetsPager", - "shortName": "list_subnets" + "resultType": "google.api_core.operation.Operation", + "shortName": "undelete_private_cloud" }, - "description": "Sample for ListSubnets", - "file": "vmwareengine_v1_generated_vmware_engine_list_subnets_sync.py", + "description": "Sample for UndeletePrivateCloud", + "file": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListSubnets_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UndeletePrivateCloud_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4442,17 +10173,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_subnets_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_sync.py" }, { "canonical": true, @@ -4462,23 +10193,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.list_vmware_engine_networks", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_cluster", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListVmwareEngineNetworks" + "shortName": "UpdateCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateClusterRequest" }, { - "name": "parent", - "type": "str" + "name": "cluster", + "type": "google.cloud.vmwareengine_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4493,22 +10228,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksAsyncPager", - "shortName": "list_vmware_engine_networks" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" }, - "description": "Sample for ListVmwareEngineNetworks", - "file": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_async.py", + "description": "Sample for UpdateCluster", + "file": "vmwareengine_v1_generated_vmware_engine_update_cluster_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListVmwareEngineNetworks_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateCluster_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4518,22 +10253,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" 
}, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_cluster_async.py" }, { "canonical": true, @@ -4542,23 +10277,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.list_vmware_engine_networks", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_cluster", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ListVmwareEngineNetworks", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ListVmwareEngineNetworks" + "shortName": "UpdateCluster" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ListVmwareEngineNetworksRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateClusterRequest" }, { - "name": "parent", - "type": "str" + "name": "cluster", + "type": "google.cloud.vmwareengine_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4573,22 +10312,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.services.vmware_engine.pagers.ListVmwareEngineNetworksPager", - "shortName": "list_vmware_engine_networks" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" }, - "description": "Sample for ListVmwareEngineNetworks", - "file": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_sync.py", + "description": "Sample for UpdateCluster", + "file": 
"vmwareengine_v1_generated_vmware_engine_update_cluster_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ListVmwareEngineNetworks_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateCluster_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4598,22 +10337,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_list_vmware_engine_networks_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_cluster_sync.py" }, { "canonical": true, @@ -4623,23 +10362,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.reset_nsx_credentials", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_dns_forwarding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateDnsForwarding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ResetNsxCredentials" + "shortName": "UpdateDnsForwarding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateDnsForwardingRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "dns_forwarding", + "type": "google.cloud.vmwareengine_v1.types.DnsForwarding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { 
"name": "retry", @@ -4655,21 +10398,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reset_nsx_credentials" + "shortName": "update_dns_forwarding" }, - "description": "Sample for ResetNsxCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_async.py", + "description": "Sample for UpdateDnsForwarding", + "file": "vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetNsxCredentials_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateDnsForwarding_async", "segments": [ { - "end": 55, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 59, "start": 27, "type": "SHORT" }, @@ -4679,22 +10422,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_async.py" }, { "canonical": true, @@ -4703,23 +10446,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.reset_nsx_credentials", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_dns_forwarding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetNsxCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateDnsForwarding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ResetNsxCredentials" + "shortName": "UpdateDnsForwarding" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.vmwareengine_v1.types.ResetNsxCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateDnsForwardingRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "dns_forwarding", + "type": "google.cloud.vmwareengine_v1.types.DnsForwarding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4735,21 +10482,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "reset_nsx_credentials" + "shortName": "update_dns_forwarding" }, - "description": "Sample for ResetNsxCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_sync.py", + "description": "Sample for UpdateDnsForwarding", + "file": "vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetNsxCredentials_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateDnsForwarding_sync", "segments": [ { - "end": 55, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 59, "start": 27, "type": "SHORT" }, @@ -4759,22 +10506,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_reset_nsx_credentials_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_sync.py" }, { "canonical": true, @@ -4784,23 +10531,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.reset_vcenter_credentials", + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_external_access_rule", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAccessRule", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ResetVcenterCredentials" + "shortName": "UpdateExternalAccessRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateExternalAccessRuleRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "external_access_rule", + "type": "google.cloud.vmwareengine_v1.types.ExternalAccessRule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4816,21 +10567,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reset_vcenter_credentials" + "shortName": "update_external_access_rule" }, - "description": "Sample for ResetVcenterCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_async.py", + "description": "Sample for UpdateExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_update_external_access_rule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetVcenterCredentials_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateExternalAccessRule_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4840,22 +10591,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 
55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_external_access_rule_async.py" }, { "canonical": true, @@ -4864,23 +10615,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.reset_vcenter_credentials", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_external_access_rule", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ResetVcenterCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAccessRule", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ResetVcenterCredentials" + "shortName": "UpdateExternalAccessRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ResetVcenterCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateExternalAccessRuleRequest" + }, + { + "name": "external_access_rule", + "type": "google.cloud.vmwareengine_v1.types.ExternalAccessRule" }, { - "name": "private_cloud", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4896,21 +10651,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "reset_vcenter_credentials" + "shortName": "update_external_access_rule" }, - "description": "Sample for ResetVcenterCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_sync.py", + "description": "Sample for UpdateExternalAccessRule", + "file": "vmwareengine_v1_generated_vmware_engine_update_external_access_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ResetVcenterCredentials_sync", + "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_UpdateExternalAccessRule_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4920,22 +10675,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_reset_vcenter_credentials_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_external_access_rule_sync.py" }, { "canonical": true, @@ -4945,23 +10700,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.show_nsx_credentials", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_external_address", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAddress", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ShowNsxCredentials" + "shortName": "UpdateExternalAddress" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateExternalAddressRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "external_address", + "type": "google.cloud.vmwareengine_v1.types.ExternalAddress" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -4976,22 +10735,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Credentials", - "shortName": "show_nsx_credentials" + 
"resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_external_address" }, - "description": "Sample for ShowNsxCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_async.py", + "description": "Sample for UpdateExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_update_external_address_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowNsxCredentials_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateExternalAddress_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -5001,22 +10760,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_external_address_async.py" }, { "canonical": true, @@ -5025,23 +10784,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.show_nsx_credentials", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_external_address", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowNsxCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateExternalAddress", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ShowNsxCredentials" + "shortName": "UpdateExternalAddress" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ShowNsxCredentialsRequest" 
+ "type": "google.cloud.vmwareengine_v1.types.UpdateExternalAddressRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "external_address", + "type": "google.cloud.vmwareengine_v1.types.ExternalAddress" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5056,22 +10819,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Credentials", - "shortName": "show_nsx_credentials" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_external_address" }, - "description": "Sample for ShowNsxCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_sync.py", + "description": "Sample for UpdateExternalAddress", + "file": "vmwareengine_v1_generated_vmware_engine_update_external_address_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowNsxCredentials_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateExternalAddress_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -5081,22 +10844,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_show_nsx_credentials_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_external_address_sync.py" }, { "canonical": true, @@ -5106,23 +10869,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.show_vcenter_credentials", + "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_logging_server", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateLoggingServer", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ShowVcenterCredentials" + "shortName": "UpdateLoggingServer" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateLoggingServerRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "logging_server", + "type": "google.cloud.vmwareengine_v1.types.LoggingServer" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5137,22 +10904,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Credentials", - "shortName": "show_vcenter_credentials" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_logging_server" }, - "description": "Sample for ShowVcenterCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_async.py", + "description": "Sample for UpdateLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_update_logging_server_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_ShowVcenterCredentials_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateLoggingServer_async", "segments": [ { - "end": 51, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 61, "start": 27, "type": "SHORT" }, @@ -5162,22 +10929,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" 
}, { - "end": 52, - "start": 49, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_logging_server_async.py" }, { "canonical": true, @@ -5186,23 +10953,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.show_vcenter_credentials", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_logging_server", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.ShowVcenterCredentials", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateLoggingServer", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "ShowVcenterCredentials" + "shortName": "UpdateLoggingServer" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.ShowVcenterCredentialsRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateLoggingServerRequest" }, { - "name": "private_cloud", - "type": "str" + "name": "logging_server", + "type": "google.cloud.vmwareengine_v1.types.LoggingServer" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5217,22 +10988,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.vmwareengine_v1.types.Credentials", - "shortName": "show_vcenter_credentials" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_logging_server" }, - "description": "Sample for ShowVcenterCredentials", - "file": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_sync.py", + "description": "Sample for UpdateLoggingServer", + "file": "vmwareengine_v1_generated_vmware_engine_update_logging_server_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"vmwareengine_v1_generated_VmwareEngine_ShowVcenterCredentials_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateLoggingServer_sync", "segments": [ { - "end": 51, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 61, "start": 27, "type": "SHORT" }, @@ -5242,22 +11013,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_show_vcenter_credentials_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_logging_server_sync.py" }, { "canonical": true, @@ -5267,23 +11038,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.undelete_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_management_dns_zone_binding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateManagementDnsZoneBinding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "UndeletePrivateCloud" + "shortName": "UpdateManagementDnsZoneBinding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.UndeletePrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateManagementDnsZoneBindingRequest" }, { - "name": "name", - "type": "str" + "name": "management_dns_zone_binding", + "type": "google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5299,21 +11074,21 @@ } ], 
"resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "undelete_private_cloud" + "shortName": "update_management_dns_zone_binding" }, - "description": "Sample for UndeletePrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_async.py", + "description": "Sample for UpdateManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_UndeletePrivateCloud_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateManagementDnsZoneBinding_async", "segments": [ { - "end": 55, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 58, "start": 27, "type": "SHORT" }, @@ -5323,22 +11098,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_async.py" }, { "canonical": true, @@ -5347,23 +11122,27 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.undelete_private_cloud", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_management_dns_zone_binding", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UndeletePrivateCloud", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateManagementDnsZoneBinding", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "UndeletePrivateCloud" + "shortName": 
"UpdateManagementDnsZoneBinding" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.UndeletePrivateCloudRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateManagementDnsZoneBindingRequest" }, { - "name": "name", - "type": "str" + "name": "management_dns_zone_binding", + "type": "google.cloud.vmwareengine_v1.types.ManagementDnsZoneBinding" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -5379,21 +11158,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "undelete_private_cloud" + "shortName": "update_management_dns_zone_binding" }, - "description": "Sample for UndeletePrivateCloud", - "file": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_sync.py", + "description": "Sample for UpdateManagementDnsZoneBinding", + "file": "vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_UndeletePrivateCloud_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateManagementDnsZoneBinding_sync", "segments": [ { - "end": 55, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 58, "start": 27, "type": "SHORT" }, @@ -5403,22 +11182,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_undelete_private_cloud_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_sync.py" }, { "canonical": true, @@ -5428,23 +11207,23 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient", "shortName": "VmwareEngineAsyncClient" }, - "fullName": 
"google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_cluster", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineAsyncClient.update_network_peering", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPeering", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "UpdateCluster" + "shortName": "UpdateNetworkPeering" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.UpdateClusterRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateNetworkPeeringRequest" }, { - "name": "cluster", - "type": "google.cloud.vmwareengine_v1.types.Cluster" + "name": "network_peering", + "type": "google.cloud.vmwareengine_v1.types.NetworkPeering" }, { "name": "update_mask", @@ -5464,21 +11243,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_cluster" + "shortName": "update_network_peering" }, - "description": "Sample for UpdateCluster", - "file": "vmwareengine_v1_generated_vmware_engine_update_cluster_async.py", + "description": "Sample for UpdateNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_update_network_peering_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateCluster_async", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateNetworkPeering_async", "segments": [ { - "end": 54, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5488,22 +11267,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": 
"vmwareengine_v1_generated_vmware_engine_update_cluster_async.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_network_peering_async.py" }, { "canonical": true, @@ -5512,23 +11291,23 @@ "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient", "shortName": "VmwareEngineClient" }, - "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_cluster", + "fullName": "google.cloud.vmwareengine_v1.VmwareEngineClient.update_network_peering", "method": { - "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateCluster", + "fullName": "google.cloud.vmwareengine.v1.VmwareEngine.UpdateNetworkPeering", "service": { "fullName": "google.cloud.vmwareengine.v1.VmwareEngine", "shortName": "VmwareEngine" }, - "shortName": "UpdateCluster" + "shortName": "UpdateNetworkPeering" }, "parameters": [ { "name": "request", - "type": "google.cloud.vmwareengine_v1.types.UpdateClusterRequest" + "type": "google.cloud.vmwareengine_v1.types.UpdateNetworkPeeringRequest" }, { - "name": "cluster", - "type": "google.cloud.vmwareengine_v1.types.Cluster" + "name": "network_peering", + "type": "google.cloud.vmwareengine_v1.types.NetworkPeering" }, { "name": "update_mask", @@ -5548,21 +11327,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "update_cluster" + "shortName": "update_network_peering" }, - "description": "Sample for UpdateCluster", - "file": "vmwareengine_v1_generated_vmware_engine_update_cluster_sync.py", + "description": "Sample for UpdateNetworkPeering", + "file": "vmwareengine_v1_generated_vmware_engine_update_network_peering_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateCluster_sync", + "regionTag": "vmwareengine_v1_generated_VmwareEngine_UpdateNetworkPeering_sync", "segments": [ { - "end": 54, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5572,22 +11351,22 @@ "type": "CLIENT_INITIALIZATION" 
}, { - "end": 44, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "vmwareengine_v1_generated_vmware_engine_update_cluster_sync.py" + "title": "vmwareengine_v1_generated_vmware_engine_update_network_peering_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_access_rule_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_access_rule_async.py new file mode 100644 index 000000000000..2dd165f780bf --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_access_rule_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateExternalAccessRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_create_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.CreateExternalAccessRuleRequest( + parent="parent_value", + external_access_rule_id="external_access_rule_id_value", + ) + + # Make the request + operation = client.create_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateExternalAccessRule_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_access_rule_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_access_rule_sync.py new file mode 100644 index 000000000000..2ebfed19e441 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_access_rule_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateExternalAccessRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_create_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.CreateExternalAccessRuleRequest( + parent="parent_value", + external_access_rule_id="external_access_rule_id_value", + ) + + # Make the request + operation = client.create_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateExternalAccessRule_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_address_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_address_async.py new file mode 100644 index 000000000000..9f4d88cd3447 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_address_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateExternalAddress_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_create_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.CreateExternalAddressRequest( + parent="parent_value", + external_address_id="external_address_id_value", + ) + + # Make the request + operation = client.create_external_address(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateExternalAddress_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_address_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_address_sync.py new file mode 100644 index 000000000000..80b0605fad01 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_external_address_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateExternalAddress_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_create_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.CreateExternalAddressRequest( + parent="parent_value", + external_address_id="external_address_id_value", + ) + + # Make the request + operation = client.create_external_address(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateExternalAddress_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_logging_server_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_logging_server_async.py new file mode 100644 index 000000000000..8bd99c3329b5 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_logging_server_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateLoggingServer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_create_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.CreateLoggingServerRequest( + parent="parent_value", + logging_server=logging_server, + logging_server_id="logging_server_id_value", + ) + + # Make the request + operation = client.create_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateLoggingServer_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_logging_server_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_logging_server_sync.py new file mode 100644 index 000000000000..36342f2f406b --- /dev/null +++ 
b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_logging_server_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateLoggingServer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_create_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.CreateLoggingServerRequest( + parent="parent_value", + logging_server=logging_server, + logging_server_id="logging_server_id_value", + ) + + # Make the request + operation = client.create_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateLoggingServer_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_async.py new file mode 100644 index 000000000000..ccf09f910234 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateManagementDnsZoneBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_create_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.CreateManagementDnsZoneBindingRequest( + parent="parent_value", + management_dns_zone_binding=management_dns_zone_binding, + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + # Make the request + operation = client.create_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateManagementDnsZoneBinding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_sync.py new file mode 100644 index 000000000000..d92cd95d080d --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_management_dns_zone_binding_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateManagementDnsZoneBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_create_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.CreateManagementDnsZoneBindingRequest( + parent="parent_value", + management_dns_zone_binding=management_dns_zone_binding, + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + # Make the request + operation = client.create_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateManagementDnsZoneBinding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_network_peering_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_network_peering_async.py new file mode 100644 index 000000000000..1f163fa2f3a4 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_network_peering_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateNetworkPeering_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_create_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.CreateNetworkPeeringRequest( + parent="parent_value", + network_peering_id="network_peering_id_value", + network_peering=network_peering, + ) + + # Make the request + operation = client.create_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateNetworkPeering_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_network_peering_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_network_peering_sync.py new file mode 100644 index 000000000000..09fecccc0e50 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_network_peering_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_CreateNetworkPeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_create_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.CreateNetworkPeeringRequest( + parent="parent_value", + network_peering_id="network_peering_id_value", + network_peering=network_peering, + ) + + # Make the request + operation = client.create_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_CreateNetworkPeering_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py index b2e71ea7bfde..5119535f0626 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_async.py @@ -40,7 +40,7 @@ async def sample_create_vmware_engine_network(): # Initialize request argument(s) vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" + vmware_engine_network.type_ = "STANDARD" request = vmwareengine_v1.CreateVmwareEngineNetworkRequest( 
parent="parent_value", diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py index 1ddde91e2f5e..4acdac4f02bd 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_create_vmware_engine_network_sync.py @@ -40,7 +40,7 @@ def sample_create_vmware_engine_network(): # Initialize request argument(s) vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" + vmware_engine_network.type_ = "STANDARD" request = vmwareengine_v1.CreateVmwareEngineNetworkRequest( parent="parent_value", diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_async.py new file mode 100644 index 000000000000..c282403dfaa5 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteExternalAccessRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_delete_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteExternalAccessRuleRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteExternalAccessRule_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_sync.py new file mode 100644 index 000000000000..231cf0644b7f --- /dev/null +++ 
b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_access_rule_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteExternalAccessRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_delete_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteExternalAccessRuleRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteExternalAccessRule_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_address_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_address_async.py new file mode 100644 index 000000000000..602a9954d59c --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_address_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteExternalAddress_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_delete_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteExternalAddressRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_external_address(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteExternalAddress_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_address_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_address_sync.py new file mode 100644 index 000000000000..016ffa4a8bab --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_external_address_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteExternalAddress_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_delete_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteExternalAddressRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_external_address(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteExternalAddress_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_logging_server_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_logging_server_async.py new file mode 100644 index 000000000000..0ccc8fe2000f --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_logging_server_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteLoggingServer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_delete_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteLoggingServerRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteLoggingServer_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_logging_server_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_logging_server_sync.py new file mode 100644 index 000000000000..40bbd0e8e2f9 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_logging_server_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteLoggingServer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_delete_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteLoggingServerRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteLoggingServer_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_async.py new file mode 100644 index 000000000000..d95f0811876d --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteManagementDnsZoneBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_delete_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteManagementDnsZoneBinding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_sync.py new file mode 100644 index 000000000000..37a2d6b948a6 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_management_dns_zone_binding_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteManagementDnsZoneBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_delete_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteManagementDnsZoneBinding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_network_peering_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_network_peering_async.py new file mode 100644 index 000000000000..4d97309cc211 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_network_peering_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPeering_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_delete_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPeering_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_network_peering_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_network_peering_sync.py new file mode 100644 index 000000000000..0aed1cadb1d2 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_delete_network_peering_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_delete_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.DeleteNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_DeleteNetworkPeering_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_async.py new file mode 100644 index 000000000000..440735f94f60 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for FetchNetworkPolicyExternalAddresses +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_FetchNetworkPolicyExternalAddresses_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_fetch_network_policy_external_addresses(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.FetchNetworkPolicyExternalAddressesRequest( + network_policy="network_policy_value", + ) + + # Make the request + page_result = client.fetch_network_policy_external_addresses(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_FetchNetworkPolicyExternalAddresses_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_sync.py new file mode 100644 index 000000000000..809dc94cdd05 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_fetch_network_policy_external_addresses_sync.py @@ -0,0 +1,53 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchNetworkPolicyExternalAddresses +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_FetchNetworkPolicyExternalAddresses_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_fetch_network_policy_external_addresses(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.FetchNetworkPolicyExternalAddressesRequest( + network_policy="network_policy_value", + ) + + # Make the request + page_result = client.fetch_network_policy_external_addresses(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_FetchNetworkPolicyExternalAddresses_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_async.py new file mode 100644 index 000000000000..92ff3df4be79 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetDnsBindPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetDnsBindPermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_dns_bind_permission(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetDnsBindPermissionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_bind_permission(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetDnsBindPermission_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_sync.py new file mode 100644 index 000000000000..09e3d87611e3 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_bind_permission_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDnsBindPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetDnsBindPermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_dns_bind_permission(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetDnsBindPermissionRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_bind_permission(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetDnsBindPermission_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_async.py new file mode 100644 index 000000000000..fd904a9d82fb --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDnsForwarding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetDnsForwarding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetDnsForwardingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_forwarding(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetDnsForwarding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_sync.py new file mode 100644 index 000000000000..52e066feefb0 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_dns_forwarding_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDnsForwarding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetDnsForwarding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetDnsForwardingRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_forwarding(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetDnsForwarding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_access_rule_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_access_rule_async.py new file mode 100644 index 000000000000..d6eb1cc79cd1 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_access_rule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetExternalAccessRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetExternalAccessRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_external_access_rule(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetExternalAccessRule_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_access_rule_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_access_rule_sync.py new file mode 100644 index 000000000000..c0714351657e --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_access_rule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetExternalAccessRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetExternalAccessRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_external_access_rule(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetExternalAccessRule_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_address_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_address_async.py new file mode 100644 index 000000000000..3b6a3e2a097e --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_address_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetExternalAddress_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetExternalAddressRequest( + name="name_value", + ) + + # Make the request + response = await client.get_external_address(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetExternalAddress_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_address_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_address_sync.py new file mode 100644 index 000000000000..5ec35a886343 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_external_address_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetExternalAddress_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetExternalAddressRequest( + name="name_value", + ) + + # Make the request + response = client.get_external_address(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetExternalAddress_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_logging_server_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_logging_server_async.py new file mode 100644 index 000000000000..5abc566ced57 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_logging_server_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetLoggingServer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetLoggingServerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_logging_server(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetLoggingServer_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_logging_server_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_logging_server_sync.py new file mode 100644 index 000000000000..eed4e7d76543 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_logging_server_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetLoggingServer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetLoggingServerRequest( + name="name_value", + ) + + # Make the request + response = client.get_logging_server(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetLoggingServer_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_async.py new file mode 100644 index 000000000000..3b2a3837713a --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetManagementDnsZoneBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_management_dns_zone_binding(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetManagementDnsZoneBinding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_sync.py new file mode 100644 index 000000000000..c763f04d198b --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_management_dns_zone_binding_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetManagementDnsZoneBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + response = client.get_management_dns_zone_binding(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetManagementDnsZoneBinding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_network_peering_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_network_peering_async.py new file mode 100644 index 000000000000..16250b3e13be --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_network_peering_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetNetworkPeering_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + response = await client.get_network_peering(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetNetworkPeering_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_network_peering_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_network_peering_sync.py new file mode 100644 index 000000000000..707e2d12ce6d --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_network_peering_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetNetworkPeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNetworkPeeringRequest( + name="name_value", + ) + + # Make the request + response = client.get_network_peering(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetNetworkPeering_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_node_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_node_async.py new file mode 100644 index 000000000000..d0cdb19659f6 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_node_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNode +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetNode_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_get_node(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNodeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_node(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetNode_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_node_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_node_sync.py new file mode 100644 index 000000000000..ac80093a2bce --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_get_node_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNode +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GetNode_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_get_node(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.GetNodeRequest( + name="name_value", + ) + + # Make the request + response = client.get_node(request=request) + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GetNode_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_async.py new file mode 100644 index 000000000000..214107f9ccf1 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GrantDnsBindPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GrantDnsBindPermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_grant_dns_bind_permission(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + principal = vmwareengine_v1.Principal() + principal.user = "user_value" + + request = vmwareengine_v1.GrantDnsBindPermissionRequest( + name="name_value", + principal=principal, + ) + + # Make the request + operation = client.grant_dns_bind_permission(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GrantDnsBindPermission_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_sync.py new file mode 100644 index 000000000000..1515e37d0e2a --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_grant_dns_bind_permission_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GrantDnsBindPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_GrantDnsBindPermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_grant_dns_bind_permission(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + principal = vmwareengine_v1.Principal() + principal.user = "user_value" + + request = vmwareengine_v1.GrantDnsBindPermissionRequest( + name="name_value", + principal=principal, + ) + + # Make the request + operation = client.grant_dns_bind_permission(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_GrantDnsBindPermission_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_access_rules_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_access_rules_async.py new file mode 100644 index 000000000000..db6b9df0924b --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_access_rules_async.py @@ 
-0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExternalAccessRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListExternalAccessRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_external_access_rules(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListExternalAccessRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_access_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListExternalAccessRules_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_access_rules_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_access_rules_sync.py new file mode 100644 index 000000000000..88419a4d3ad2 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_access_rules_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListExternalAccessRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListExternalAccessRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_external_access_rules(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListExternalAccessRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_access_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListExternalAccessRules_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_addresses_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_addresses_async.py new file mode 100644 index 000000000000..7396ec1dd398 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_addresses_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExternalAddresses +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListExternalAddresses_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_external_addresses(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListExternalAddressesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_addresses(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListExternalAddresses_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_addresses_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_addresses_sync.py new file mode 100644 index 000000000000..6888d2630315 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_external_addresses_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExternalAddresses +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListExternalAddresses_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_external_addresses(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListExternalAddressesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_external_addresses(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListExternalAddresses_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_logging_servers_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_logging_servers_async.py new file mode 100644 index 000000000000..63075452a246 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_logging_servers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLoggingServers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListLoggingServers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_logging_servers(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListLoggingServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logging_servers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListLoggingServers_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_logging_servers_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_logging_servers_sync.py new file mode 100644 index 000000000000..816d05e0a648 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_logging_servers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLoggingServers +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListLoggingServers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_logging_servers(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListLoggingServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logging_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListLoggingServers_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_async.py new file mode 100644 index 000000000000..4b15178da7cf --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListManagementDnsZoneBindings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListManagementDnsZoneBindings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_management_dns_zone_bindings(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListManagementDnsZoneBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_management_dns_zone_bindings(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListManagementDnsZoneBindings_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_sync.py new file mode 100644 index 000000000000..db7e7414e93f --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_management_dns_zone_bindings_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListManagementDnsZoneBindings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListManagementDnsZoneBindings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_management_dns_zone_bindings(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListManagementDnsZoneBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_management_dns_zone_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListManagementDnsZoneBindings_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_network_peerings_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_network_peerings_async.py new file mode 100644 index 000000000000..317fe0f48ba6 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_network_peerings_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNetworkPeerings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListNetworkPeerings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_network_peerings(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNetworkPeeringsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_network_peerings(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListNetworkPeerings_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_network_peerings_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_network_peerings_sync.py new file mode 100644 index 000000000000..6403fbdbb037 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_network_peerings_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNetworkPeerings +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListNetworkPeerings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_network_peerings(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNetworkPeeringsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_network_peerings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListNetworkPeerings_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_nodes_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_nodes_async.py new file mode 100644 index 000000000000..8fd00b319a89 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_nodes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListNodes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_nodes(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_nodes(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListNodes_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_nodes_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_nodes_sync.py new file mode 100644 index 000000000000..acbf96c8911a --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_nodes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_nodes(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListNodes_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_peering_routes_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_peering_routes_async.py new file mode 100644 index 000000000000..3228399dcc7b --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_peering_routes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPeeringRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListPeeringRoutes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_list_peering_routes(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListPeeringRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_peering_routes(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListPeeringRoutes_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_peering_routes_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_peering_routes_sync.py new file mode 100644 index 000000000000..50501ab2b1ea --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_list_peering_routes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPeeringRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_ListPeeringRoutes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_list_peering_routes(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.ListPeeringRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_peering_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_ListPeeringRoutes_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_async.py new file mode 100644 index 000000000000..41cc3d0527e7 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RepairManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_RepairManagementDnsZoneBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_repair_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.RepairManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.repair_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_RepairManagementDnsZoneBinding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_sync.py new file mode 100644 index 000000000000..89a44929fa58 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_repair_management_dns_zone_binding_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RepairManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_RepairManagementDnsZoneBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_repair_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.RepairManagementDnsZoneBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.repair_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_RepairManagementDnsZoneBinding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_async.py new file mode 100644 index 000000000000..e7d80dc00dba --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RevokeDnsBindPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_RevokeDnsBindPermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_revoke_dns_bind_permission(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + principal = vmwareengine_v1.Principal() + principal.user = "user_value" + + request = vmwareengine_v1.RevokeDnsBindPermissionRequest( + name="name_value", + principal=principal, + ) + + # Make the request + operation = client.revoke_dns_bind_permission(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_RevokeDnsBindPermission_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_sync.py new file mode 100644 index 000000000000..657736bf1728 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_revoke_dns_bind_permission_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for RevokeDnsBindPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_RevokeDnsBindPermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_revoke_dns_bind_permission(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + principal = vmwareengine_v1.Principal() + principal.user = "user_value" + + request = vmwareengine_v1.RevokeDnsBindPermissionRequest( + name="name_value", + principal=principal, + ) + + # Make the request + operation = client.revoke_dns_bind_permission(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_RevokeDnsBindPermission_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_async.py new file mode 100644 index 000000000000..4d96b5b578cf --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_async.py @@ -0,0 +1,60 
@@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDnsForwarding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateDnsForwarding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_update_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + dns_forwarding = vmwareengine_v1.DnsForwarding() + dns_forwarding.forwarding_rules.domain = "domain_value" + dns_forwarding.forwarding_rules.name_servers = ['name_servers_value1', 'name_servers_value2'] + + request = vmwareengine_v1.UpdateDnsForwardingRequest( + dns_forwarding=dns_forwarding, + ) + + # Make the request + operation = client.update_dns_forwarding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateDnsForwarding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_sync.py new file mode 100644 index 000000000000..6a9202926733 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_dns_forwarding_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDnsForwarding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateDnsForwarding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_update_dns_forwarding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + dns_forwarding = vmwareengine_v1.DnsForwarding() + dns_forwarding.forwarding_rules.domain = "domain_value" + dns_forwarding.forwarding_rules.name_servers = ['name_servers_value1', 'name_servers_value2'] + + request = vmwareengine_v1.UpdateDnsForwardingRequest( + dns_forwarding=dns_forwarding, + ) + + # Make the request + operation = client.update_dns_forwarding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateDnsForwarding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_access_rule_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_access_rule_async.py new file 
mode 100644 index 000000000000..c3ae40424a8d --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_access_rule_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateExternalAccessRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_update_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.UpdateExternalAccessRuleRequest( + ) + + # Make the request + operation = client.update_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateExternalAccessRule_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_access_rule_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_access_rule_sync.py new file mode 100644 index 000000000000..4753fb42d65a --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_access_rule_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateExternalAccessRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateExternalAccessRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_update_external_access_rule(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.UpdateExternalAccessRuleRequest( + ) + + # Make the request + operation = client.update_external_access_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateExternalAccessRule_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_address_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_address_async.py new file mode 100644 index 000000000000..f623b45b0f59 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_address_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateExternalAddress_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_update_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + request = vmwareengine_v1.UpdateExternalAddressRequest( + ) + + # Make the request + operation = client.update_external_address(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateExternalAddress_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_address_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_address_sync.py new file mode 100644 index 000000000000..b71436b3aea7 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_external_address_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateExternalAddress +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateExternalAddress_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_update_external_address(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + request = vmwareengine_v1.UpdateExternalAddressRequest( + ) + + # Make the request + operation = client.update_external_address(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateExternalAddress_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_logging_server_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_logging_server_async.py new file mode 100644 index 000000000000..4e35dba83995 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_logging_server_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateLoggingServer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_update_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.UpdateLoggingServerRequest( + logging_server=logging_server, + ) + + # Make the request + operation = client.update_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateLoggingServer_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_logging_server_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_logging_server_sync.py new file mode 100644 index 000000000000..f350c94ec130 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_logging_server_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLoggingServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateLoggingServer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_update_logging_server(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + logging_server = vmwareengine_v1.LoggingServer() + logging_server.hostname = "hostname_value" + logging_server.port = 453 + logging_server.protocol = "TCP" + logging_server.source_type = "VCSA" + + request = vmwareengine_v1.UpdateLoggingServerRequest( + logging_server=logging_server, + ) + + # Make the request + operation = client.update_logging_server(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateLoggingServer_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_async.py new 
file mode 100644 index 000000000000..8f862392f26f --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateManagementDnsZoneBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_update_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.UpdateManagementDnsZoneBindingRequest( + management_dns_zone_binding=management_dns_zone_binding, + ) + + # Make the request + operation = client.update_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateManagementDnsZoneBinding_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_sync.py new file mode 100644 index 000000000000..4f189b810acc --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_management_dns_zone_binding_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateManagementDnsZoneBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateManagementDnsZoneBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_update_management_dns_zone_binding(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + management_dns_zone_binding = vmwareengine_v1.ManagementDnsZoneBinding() + management_dns_zone_binding.vpc_network = "vpc_network_value" + + request = vmwareengine_v1.UpdateManagementDnsZoneBindingRequest( + management_dns_zone_binding=management_dns_zone_binding, + ) + + # Make the request + operation = client.update_management_dns_zone_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateManagementDnsZoneBinding_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_network_peering_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_network_peering_async.py new file mode 100644 index 000000000000..9fe11cb60abc --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_network_peering_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateNetworkPeering_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +async def sample_update_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineAsyncClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.UpdateNetworkPeeringRequest( + network_peering=network_peering, + ) + + # Make the request + operation = client.update_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateNetworkPeering_async] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_network_peering_sync.py 
b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_network_peering_sync.py new file mode 100644 index 000000000000..12125daf23f4 --- /dev/null +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_network_peering_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateNetworkPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-vmwareengine + + +# [START vmwareengine_v1_generated_VmwareEngine_UpdateNetworkPeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import vmwareengine_v1 + + +def sample_update_network_peering(): + # Create a client + client = vmwareengine_v1.VmwareEngineClient() + + # Initialize request argument(s) + network_peering = vmwareengine_v1.NetworkPeering() + network_peering.peer_network = "peer_network_value" + network_peering.peer_network_type = "DELL_POWERSCALE" + network_peering.vmware_engine_network = "vmware_engine_network_value" + + request = vmwareengine_v1.UpdateNetworkPeeringRequest( + network_peering=network_peering, + ) + + # Make the request + operation = client.update_network_peering(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END vmwareengine_v1_generated_VmwareEngine_UpdateNetworkPeering_sync] diff --git a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_async.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_async.py index 3de9e6380e27..2e229e279107 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_async.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_async.py @@ -40,7 +40,7 @@ async def sample_update_vmware_engine_network(): # Initialize request argument(s) vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" + vmware_engine_network.type_ = "STANDARD" request = vmwareengine_v1.UpdateVmwareEngineNetworkRequest( vmware_engine_network=vmware_engine_network, diff --git 
a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_sync.py b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_sync.py index 4295b5f96a6d..da9499845895 100644 --- a/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_sync.py +++ b/packages/google-cloud-vmwareengine/samples/generated_samples/vmwareengine_v1_generated_vmware_engine_update_vmware_engine_network_sync.py @@ -40,7 +40,7 @@ def sample_update_vmware_engine_network(): # Initialize request argument(s) vmware_engine_network = vmwareengine_v1.VmwareEngineNetwork() - vmware_engine_network.type_ = "LEGACY" + vmware_engine_network.type_ = "STANDARD" request = vmwareengine_v1.UpdateVmwareEngineNetworkRequest( vmware_engine_network=vmware_engine_network, diff --git a/packages/google-cloud-vmwareengine/scripts/fixup_vmwareengine_v1_keywords.py b/packages/google-cloud-vmwareengine/scripts/fixup_vmwareengine_v1_keywords.py index ba1fce5c62b1..ca63e9a59a38 100644 --- a/packages/google-cloud-vmwareengine/scripts/fixup_vmwareengine_v1_keywords.py +++ b/packages/google-cloud-vmwareengine/scripts/fixup_vmwareengine_v1_keywords.py @@ -40,39 +40,74 @@ class vmwareengineCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_external_access_rule': ('parent', 'external_access_rule', 'external_access_rule_id', 'request_id', ), + 'create_external_address': ('parent', 'external_address', 'external_address_id', 'request_id', ), 'create_hcx_activation_key': ('parent', 'hcx_activation_key', 'hcx_activation_key_id', 'request_id', ), + 'create_logging_server': ('parent', 'logging_server', 'logging_server_id', 
'request_id', ), + 'create_management_dns_zone_binding': ('parent', 'management_dns_zone_binding', 'management_dns_zone_binding_id', 'request_id', ), + 'create_network_peering': ('parent', 'network_peering_id', 'network_peering', 'request_id', ), 'create_network_policy': ('parent', 'network_policy_id', 'network_policy', 'request_id', ), 'create_private_cloud': ('parent', 'private_cloud_id', 'private_cloud', 'request_id', 'validate_only', ), 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', ), 'create_vmware_engine_network': ('parent', 'vmware_engine_network_id', 'vmware_engine_network', 'request_id', ), 'delete_cluster': ('name', 'request_id', ), + 'delete_external_access_rule': ('name', 'request_id', ), + 'delete_external_address': ('name', 'request_id', ), + 'delete_logging_server': ('name', 'request_id', ), + 'delete_management_dns_zone_binding': ('name', 'request_id', ), + 'delete_network_peering': ('name', 'request_id', ), 'delete_network_policy': ('name', 'request_id', ), 'delete_private_cloud': ('name', 'request_id', 'force', 'delay_hours', ), 'delete_private_connection': ('name', 'request_id', ), 'delete_vmware_engine_network': ('name', 'request_id', 'etag', ), + 'fetch_network_policy_external_addresses': ('network_policy', 'page_size', 'page_token', ), 'get_cluster': ('name', ), + 'get_dns_bind_permission': ('name', ), + 'get_dns_forwarding': ('name', ), + 'get_external_access_rule': ('name', ), + 'get_external_address': ('name', ), 'get_hcx_activation_key': ('name', ), + 'get_logging_server': ('name', ), + 'get_management_dns_zone_binding': ('name', ), + 'get_network_peering': ('name', ), 'get_network_policy': ('name', ), + 'get_node': ('name', ), 'get_node_type': ('name', ), 'get_private_cloud': ('name', ), 'get_private_connection': ('name', ), 'get_subnet': ('name', ), 'get_vmware_engine_network': ('name', ), + 'grant_dns_bind_permission': ('name', 'principal', 'request_id', ), 'list_clusters': 
('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_external_access_rules': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_external_addresses': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_hcx_activation_keys': ('parent', 'page_size', 'page_token', ), + 'list_logging_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_management_dns_zone_bindings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_network_peerings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_network_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_nodes': ('parent', 'page_size', 'page_token', ), 'list_node_types': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_peering_routes': ('parent', 'page_size', 'page_token', 'filter', ), 'list_private_clouds': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_private_connection_peering_routes': ('parent', 'page_size', 'page_token', ), 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_subnets': ('parent', 'page_size', 'page_token', ), 'list_vmware_engine_networks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'repair_management_dns_zone_binding': ('name', 'request_id', ), 'reset_nsx_credentials': ('private_cloud', 'request_id', ), - 'reset_vcenter_credentials': ('private_cloud', 'request_id', ), + 'reset_vcenter_credentials': ('private_cloud', 'request_id', 'username', ), + 'revoke_dns_bind_permission': ('name', 'principal', 'request_id', ), 'show_nsx_credentials': ('private_cloud', ), - 'show_vcenter_credentials': ('private_cloud', ), + 'show_vcenter_credentials': ('private_cloud', 'username', ), 'undelete_private_cloud': ('name', 'request_id', ), 'update_cluster': ('update_mask', 'cluster', 'request_id', 'validate_only', ), + 'update_dns_forwarding': ('dns_forwarding', 'update_mask', 
'request_id', ), + 'update_external_access_rule': ('update_mask', 'external_access_rule', 'request_id', ), + 'update_external_address': ('update_mask', 'external_address', 'request_id', ), + 'update_logging_server': ('update_mask', 'logging_server', 'request_id', ), + 'update_management_dns_zone_binding': ('update_mask', 'management_dns_zone_binding', 'request_id', ), + 'update_network_peering': ('network_peering', 'update_mask', 'request_id', ), 'update_network_policy': ('network_policy', 'update_mask', 'request_id', ), 'update_private_cloud': ('private_cloud', 'update_mask', 'request_id', ), 'update_private_connection': ('private_connection', 'update_mask', 'request_id', ), diff --git a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py index df7df1b9624c..966d9c46c5c3 100644 --- a/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py +++ b/packages/google-cloud-vmwareengine/tests/unit/gapic/vmwareengine_v1/test_vmware_engine.py @@ -3790,11 +3790,11 @@ async def test_delete_cluster_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListSubnetsRequest, + vmwareengine.ListNodesRequest, dict, ], ) -def test_list_subnets(request_type, transport: str = "grpc"): +def test_list_nodes(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3805,26 +3805,24 @@ def test_list_subnets(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine.ListSubnetsResponse( + call.return_value = vmwareengine.ListNodesResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) - response = client.list_subnets(request) + response = client.list_nodes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListSubnetsRequest() + assert args[0] == vmwareengine.ListNodesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubnetsPager) + assert isinstance(response, pagers.ListNodesPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] -def test_list_subnets_empty_call(): +def test_list_nodes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -3833,16 +3831,16 @@ def test_list_subnets_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: - client.list_subnets() + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: + client.list_nodes() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListSubnetsRequest() + assert args[0] == vmwareengine.ListNodesRequest() @pytest.mark.asyncio -async def test_list_subnets_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.ListSubnetsRequest +async def test_list_nodes_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.ListNodesRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3854,47 +3852,45 @@ async def test_list_subnets_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListSubnetsResponse( + vmwareengine.ListNodesResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) ) - response = await client.list_subnets(request) + response = await client.list_nodes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListSubnetsRequest() + assert args[0] == vmwareengine.ListNodesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSubnetsAsyncPager) + assert isinstance(response, pagers.ListNodesAsyncPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_subnets_async_from_dict(): - await test_list_subnets_async(request_type=dict) +async def test_list_nodes_async_from_dict(): + await test_list_nodes_async(request_type=dict) -def test_list_subnets_field_headers(): +def test_list_nodes_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListSubnetsRequest() + request = vmwareengine.ListNodesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: - call.return_value = vmwareengine.ListSubnetsResponse() - client.list_subnets(request) + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: + call.return_value = vmwareengine.ListNodesResponse() + client.list_nodes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3910,23 +3906,23 @@ def test_list_subnets_field_headers(): @pytest.mark.asyncio -async def test_list_subnets_field_headers_async(): +async def test_list_nodes_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListSubnetsRequest() + request = vmwareengine.ListNodesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListSubnetsResponse() + vmwareengine.ListNodesResponse() ) - await client.list_subnets(request) + await client.list_nodes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3941,18 +3937,18 @@ async def test_list_subnets_field_headers_async(): ) in kw["metadata"] -def test_list_subnets_flattened(): +def test_list_nodes_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListSubnetsResponse() + call.return_value = vmwareengine.ListNodesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_subnets( + client.list_nodes( parent="parent_value", ) @@ -3965,7 +3961,7 @@ def test_list_subnets_flattened(): assert arg == mock_val -def test_list_subnets_flattened_error(): +def test_list_nodes_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3973,29 +3969,29 @@ def test_list_subnets_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_subnets( - vmwareengine.ListSubnetsRequest(), + client.list_nodes( + vmwareengine.ListNodesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_subnets_flattened_async(): +async def test_list_nodes_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListSubnetsResponse() + call.return_value = vmwareengine.ListNodesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListSubnetsResponse() + vmwareengine.ListNodesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_subnets( + response = await client.list_nodes( parent="parent_value", ) @@ -4009,7 +4005,7 @@ async def test_list_subnets_flattened_async(): @pytest.mark.asyncio -async def test_list_subnets_flattened_error_async(): +async def test_list_nodes_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4017,44 +4013,44 @@ async def test_list_subnets_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_subnets( - vmwareengine.ListSubnetsRequest(), + await client.list_nodes( + vmwareengine.ListNodesRequest(), parent="parent_value", ) -def test_list_subnets_pager(transport_name: str = "grpc"): +def test_list_nodes_pager(transport_name: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], next_page_token="abc", ), - vmwareengine.ListSubnetsResponse( - subnets=[], + vmwareengine.ListNodesResponse( + nodes=[], next_page_token="def", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), ], next_page_token="ghi", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], ), RuntimeError, @@ -4064,95 +4060,95 @@ def test_list_subnets_pager(transport_name: str = "grpc"): metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_subnets(request={}) + pager = client.list_nodes(request={}) assert pager._metadata == metadata results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.Subnet) for i in 
results) + assert all(isinstance(i, vmwareengine_resources.Node) for i in results) -def test_list_subnets_pages(transport_name: str = "grpc"): +def test_list_nodes_pages(transport_name: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + with mock.patch.object(type(client.transport.list_nodes), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], next_page_token="abc", ), - vmwareengine.ListSubnetsResponse( - subnets=[], + vmwareengine.ListNodesResponse( + nodes=[], next_page_token="def", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), ], next_page_token="ghi", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], ), RuntimeError, ) - pages = list(client.list_subnets(request={}).pages) + pages = list(client.list_nodes(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_subnets_async_pager(): +async def test_list_nodes_async_pager(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_subnets), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_nodes), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], next_page_token="abc", ), - vmwareengine.ListSubnetsResponse( - subnets=[], + vmwareengine.ListNodesResponse( + nodes=[], next_page_token="def", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), ], next_page_token="ghi", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], ), RuntimeError, ) - async_pager = await client.list_subnets( + async_pager = await client.list_nodes( request={}, ) assert async_pager.next_page_token == "abc" @@ -4161,43 +4157,43 @@ async def test_list_subnets_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine_resources.Subnet) for i in responses) + assert all(isinstance(i, vmwareengine_resources.Node) for i in responses) @pytest.mark.asyncio -async def test_list_subnets_async_pages(): +async def test_list_nodes_async_pages(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_subnets), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_nodes), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], next_page_token="abc", ), - vmwareengine.ListSubnetsResponse( - subnets=[], + vmwareengine.ListNodesResponse( + nodes=[], next_page_token="def", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), ], next_page_token="ghi", ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), ], ), RuntimeError, @@ -4206,7 +4202,7 @@ async def test_list_subnets_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_subnets(request={}) + await client.list_nodes(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -4216,11 +4212,11 @@ async def test_list_subnets_async_pages(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetSubnetRequest, + vmwareengine.GetNodeRequest, dict, ], ) -def test_get_subnet(request_type, transport: str = "grpc"): +def test_get_node(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, 
@@ -4231,32 +4227,36 @@ def test_get_subnet(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + with mock.patch.object(type(client.transport.get_node), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Subnet( + call.return_value = vmwareengine_resources.Node( name="name_value", - ip_cidr_range="ip_cidr_range_value", - gateway_ip="gateway_ip_value", - type_="type__value", - state=vmwareengine_resources.Subnet.State.ACTIVE, + fqdn="fqdn_value", + internal_ip="internal_ip_value", + node_type_id="node_type_id_value", + version="version_value", + custom_core_count=1835, + state=vmwareengine_resources.Node.State.ACTIVE, ) - response = client.get_subnet(request) + response = client.get_node(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetSubnetRequest() + assert args[0] == vmwareengine.GetNodeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Subnet) + assert isinstance(response, vmwareengine_resources.Node) assert response.name == "name_value" - assert response.ip_cidr_range == "ip_cidr_range_value" - assert response.gateway_ip == "gateway_ip_value" - assert response.type_ == "type__value" - assert response.state == vmwareengine_resources.Subnet.State.ACTIVE + assert response.fqdn == "fqdn_value" + assert response.internal_ip == "internal_ip_value" + assert response.node_type_id == "node_type_id_value" + assert response.version == "version_value" + assert response.custom_core_count == 1835 + assert response.state == vmwareengine_resources.Node.State.ACTIVE -def test_get_subnet_empty_call(): +def test_get_node_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -4265,16 +4265,16 @@ def test_get_subnet_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: - client.get_subnet() + with mock.patch.object(type(client.transport.get_node), "__call__") as call: + client.get_node() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetSubnetRequest() + assert args[0] == vmwareengine.GetNodeRequest() @pytest.mark.asyncio -async def test_get_subnet_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.GetSubnetRequest +async def test_get_node_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetNodeRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4286,53 +4286,57 @@ async def test_get_subnet_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + with mock.patch.object(type(client.transport.get_node), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Subnet( + vmwareengine_resources.Node( name="name_value", - ip_cidr_range="ip_cidr_range_value", - gateway_ip="gateway_ip_value", - type_="type__value", - state=vmwareengine_resources.Subnet.State.ACTIVE, + fqdn="fqdn_value", + internal_ip="internal_ip_value", + node_type_id="node_type_id_value", + version="version_value", + custom_core_count=1835, + state=vmwareengine_resources.Node.State.ACTIVE, ) ) - response = await client.get_subnet(request) + response = await client.get_node(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetSubnetRequest() + assert args[0] == vmwareengine.GetNodeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Subnet) + assert isinstance(response, vmwareengine_resources.Node) assert response.name == "name_value" - assert response.ip_cidr_range == "ip_cidr_range_value" - assert response.gateway_ip == "gateway_ip_value" - assert response.type_ == "type__value" - assert response.state == vmwareengine_resources.Subnet.State.ACTIVE + assert response.fqdn == "fqdn_value" + assert response.internal_ip == "internal_ip_value" + assert response.node_type_id == "node_type_id_value" + assert response.version == "version_value" + assert response.custom_core_count == 1835 + assert response.state == vmwareengine_resources.Node.State.ACTIVE @pytest.mark.asyncio -async def test_get_subnet_async_from_dict(): - await test_get_subnet_async(request_type=dict) +async def test_get_node_async_from_dict(): + await test_get_node_async(request_type=dict) -def test_get_subnet_field_headers(): +def test_get_node_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetSubnetRequest() + request = vmwareengine.GetNodeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: - call.return_value = vmwareengine_resources.Subnet() - client.get_subnet(request) + with mock.patch.object(type(client.transport.get_node), "__call__") as call: + call.return_value = vmwareengine_resources.Node() + client.get_node(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4348,23 +4352,23 @@ def test_get_subnet_field_headers(): @pytest.mark.asyncio -async def test_get_subnet_field_headers_async(): +async def test_get_node_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetSubnetRequest() + request = vmwareengine.GetNodeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + with mock.patch.object(type(client.transport.get_node), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Subnet() + vmwareengine_resources.Node() ) - await client.get_subnet(request) + await client.get_node(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4379,18 +4383,18 @@ async def test_get_subnet_field_headers_async(): ) in kw["metadata"] -def test_get_subnet_flattened(): +def test_get_node_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + with mock.patch.object(type(client.transport.get_node), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Subnet() + call.return_value = vmwareengine_resources.Node() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_subnet( + client.get_node( name="name_value", ) @@ -4403,7 +4407,7 @@ def test_get_subnet_flattened(): assert arg == mock_val -def test_get_subnet_flattened_error(): +def test_get_node_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4411,29 +4415,29 @@ def test_get_subnet_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_subnet( - vmwareengine.GetSubnetRequest(), + client.get_node( + vmwareengine.GetNodeRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_subnet_flattened_async(): +async def test_get_node_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + with mock.patch.object(type(client.transport.get_node), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Subnet() + call.return_value = vmwareengine_resources.Node() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Subnet() + vmwareengine_resources.Node() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_subnet( + response = await client.get_node( name="name_value", ) @@ -4447,7 +4451,7 @@ async def test_get_subnet_flattened_async(): @pytest.mark.asyncio -async def test_get_subnet_flattened_error_async(): +async def test_get_node_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4455,8 +4459,8 @@ async def test_get_subnet_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_subnet( - vmwareengine.GetSubnetRequest(), + await client.get_node( + vmwareengine.GetNodeRequest(), name="name_value", ) @@ -4464,11 +4468,11 @@ async def test_get_subnet_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateSubnetRequest, + vmwareengine.ListExternalAddressesRequest, dict, ], ) -def test_update_subnet(request_type, transport: str = "grpc"): +def test_list_external_addresses(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4479,21 +4483,28 @@ def test_update_subnet(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_subnet(request) + call.return_value = vmwareengine.ListExternalAddressesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_external_addresses(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateSubnetRequest() + assert args[0] == vmwareengine.ListExternalAddressesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListExternalAddressesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_subnet_empty_call(): +def test_list_external_addresses_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -4502,16 +4513,19 @@ def test_update_subnet_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: - client.update_subnet() + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: + client.list_external_addresses() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateSubnetRequest() + assert args[0] == vmwareengine.ListExternalAddressesRequest() @pytest.mark.asyncio -async def test_update_subnet_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.UpdateSubnetRequest +async def test_list_external_addresses_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListExternalAddressesRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4523,42 +4537,51 @@ async def test_update_subnet_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine.ListExternalAddressesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_subnet(request) + response = await client.list_external_addresses(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateSubnetRequest() + assert args[0] == vmwareengine.ListExternalAddressesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListExternalAddressesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_subnet_async_from_dict(): - await test_update_subnet_async(request_type=dict) +async def test_list_external_addresses_async_from_dict(): + await test_list_external_addresses_async(request_type=dict) -def test_update_subnet_field_headers(): +def test_list_external_addresses_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdateSubnetRequest() + request = vmwareengine.ListExternalAddressesRequest() - request.subnet.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_subnet(request) + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: + call.return_value = vmwareengine.ListExternalAddressesResponse() + client.list_external_addresses(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4569,28 +4592,30 @@ def test_update_subnet_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subnet.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_subnet_field_headers_async(): +async def test_list_external_addresses_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdateSubnetRequest() + request = vmwareengine.ListExternalAddressesRequest() - request.subnet.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine.ListExternalAddressesResponse() ) - await client.update_subnet(request) + await client.list_external_addresses(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -4601,39 +4626,37 @@ async def test_update_subnet_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subnet.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_subnet_flattened(): +def test_list_external_addresses_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine.ListExternalAddressesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_subnet( - subnet=vmwareengine_resources.Subnet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_external_addresses( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].subnet - mock_val = vmwareengine_resources.Subnet(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_subnet_flattened_error(): +def test_list_external_addresses_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4641,48 +4664,45 @@ def test_update_subnet_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_subnet( - vmwareengine.UpdateSubnetRequest(), - subnet=vmwareengine_resources.Subnet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_external_addresses( + vmwareengine.ListExternalAddressesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_subnet_flattened_async(): +async def test_list_external_addresses_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine.ListExternalAddressesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine.ListExternalAddressesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_subnet( - subnet=vmwareengine_resources.Subnet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_external_addresses( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].subnet - mock_val = vmwareengine_resources.Subnet(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_subnet_flattened_error_async(): +async def test_list_external_addresses_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4690,21 +4710,222 @@ async def test_update_subnet_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_subnet( - vmwareengine.UpdateSubnetRequest(), - subnet=vmwareengine_resources.Subnet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_external_addresses( + vmwareengine.ListExternalAddressesRequest(), + parent="parent_value", + ) + + +def test_list_external_addresses_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="abc", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[], + next_page_token="def", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="ghi", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_external_addresses(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.ExternalAddress) for i in results + ) + + +def test_list_external_addresses_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_addresses), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="abc", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[], + next_page_token="def", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="ghi", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + ), + RuntimeError, + ) + pages = list(client.list_external_addresses(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_external_addresses_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_addresses), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="abc", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[], + next_page_token="def", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="ghi", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_external_addresses( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.ExternalAddress) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_external_addresses_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_addresses), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="abc", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[], + next_page_token="def", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="ghi", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_external_addresses(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListNodeTypesRequest, + vmwareengine.FetchNetworkPolicyExternalAddressesRequest, dict, ], ) -def test_list_node_types(request_type, transport: str = "grpc"): +def test_fetch_network_policy_external_addresses(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4715,26 +4936,26 @@ def test_list_node_types(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine.ListNodeTypesResponse( + call.return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) - response = client.list_node_types(request) + response = client.fetch_network_policy_external_addresses(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListNodeTypesRequest() + assert args[0] == vmwareengine.FetchNetworkPolicyExternalAddressesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNodeTypesPager) + assert isinstance(response, pagers.FetchNetworkPolicyExternalAddressesPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] -def test_list_node_types_empty_call(): +def test_fetch_network_policy_external_addresses_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -4743,16 +4964,19 @@ def test_list_node_types_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: - client.list_node_types() + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: + client.fetch_network_policy_external_addresses() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListNodeTypesRequest() + assert args[0] == vmwareengine.FetchNetworkPolicyExternalAddressesRequest() @pytest.mark.asyncio -async def test_list_node_types_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.ListNodeTypesRequest +async def test_fetch_network_policy_external_addresses_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.FetchNetworkPolicyExternalAddressesRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4764,47 +4988,49 @@ async def test_list_node_types_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListNodeTypesResponse( + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) ) - response = await client.list_node_types(request) + response = await client.fetch_network_policy_external_addresses(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListNodeTypesRequest() + assert args[0] == vmwareengine.FetchNetworkPolicyExternalAddressesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNodeTypesAsyncPager) + assert isinstance(response, pagers.FetchNetworkPolicyExternalAddressesAsyncPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_node_types_async_from_dict(): - await test_list_node_types_async(request_type=dict) +async def test_fetch_network_policy_external_addresses_async_from_dict(): + await test_fetch_network_policy_external_addresses_async(request_type=dict) -def test_list_node_types_field_headers(): +def test_fetch_network_policy_external_addresses_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListNodeTypesRequest() + request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest() - request.parent = "parent_value" + request.network_policy = "network_policy_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: - call.return_value = vmwareengine.ListNodeTypesResponse() - client.list_node_types(request) + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: + call.return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() + client.fetch_network_policy_external_addresses(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4815,28 +5041,30 @@ def test_list_node_types_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "network_policy=network_policy_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_node_types_field_headers_async(): +async def test_fetch_network_policy_external_addresses_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListNodeTypesRequest() + request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest() - request.parent = "parent_value" + request.network_policy = "network_policy_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListNodeTypesResponse() + vmwareengine.FetchNetworkPolicyExternalAddressesResponse() ) - await client.list_node_types(request) + await client.fetch_network_policy_external_addresses(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4847,35 +5075,37 @@ async def test_list_node_types_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "network_policy=network_policy_value", ) in kw["metadata"] -def test_list_node_types_flattened(): +def test_fetch_network_policy_external_addresses_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListNodeTypesResponse() + call.return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_node_types( - parent="parent_value", + client.fetch_network_policy_external_addresses( + network_policy="network_policy_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].network_policy + mock_val = "network_policy_value" assert arg == mock_val -def test_list_node_types_flattened_error(): +def test_fetch_network_policy_external_addresses_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4883,43 +5113,45 @@ def test_list_node_types_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_node_types( - vmwareengine.ListNodeTypesRequest(), - parent="parent_value", + client.fetch_network_policy_external_addresses( + vmwareengine.FetchNetworkPolicyExternalAddressesRequest(), + network_policy="network_policy_value", ) @pytest.mark.asyncio -async def test_list_node_types_flattened_async(): +async def test_fetch_network_policy_external_addresses_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListNodeTypesResponse() + call.return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListNodeTypesResponse() + vmwareengine.FetchNetworkPolicyExternalAddressesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_node_types( - parent="parent_value", + response = await client.fetch_network_policy_external_addresses( + network_policy="network_policy_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].network_policy + mock_val = "network_policy_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_node_types_flattened_error_async(): +async def test_fetch_network_policy_external_addresses_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4927,44 +5159,46 @@ async def test_list_node_types_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_node_types( - vmwareengine.ListNodeTypesRequest(), - parent="parent_value", + await client.fetch_network_policy_external_addresses( + vmwareengine.FetchNetworkPolicyExternalAddressesRequest(), + network_policy="network_policy_value", ) -def test_list_node_types_pager(transport_name: str = "grpc"): +def test_fetch_network_policy_external_addresses_pager(transport_name: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], next_page_token="abc", ), - vmwareengine.ListNodeTypesResponse( - node_types=[], + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[], next_page_token="def", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), ], next_page_token="ghi", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], ), 
RuntimeError, @@ -4972,97 +5206,103 @@ def test_list_node_types_pager(transport_name: str = "grpc"): metadata = () metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + gapic_v1.routing_header.to_grpc_metadata((("network_policy", ""),)), ) - pager = client.list_node_types(request={}) + pager = client.fetch_network_policy_external_addresses(request={}) assert pager._metadata == metadata results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.NodeType) for i in results) + assert all( + isinstance(i, vmwareengine_resources.ExternalAddress) for i in results + ) -def test_list_node_types_pages(transport_name: str = "grpc"): +def test_fetch_network_policy_external_addresses_pages(transport_name: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_network_policy_external_addresses), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], next_page_token="abc", ), - vmwareengine.ListNodeTypesResponse( - node_types=[], + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[], next_page_token="def", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), ], next_page_token="ghi", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], ), RuntimeError, ) - pages = list(client.list_node_types(request={}).pages) + pages = list(client.fetch_network_policy_external_addresses(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_node_types_async_pager(): +async def test_fetch_network_policy_external_addresses_async_pager(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_node_types), "__call__", new_callable=mock.AsyncMock + type(client.transport.fetch_network_policy_external_addresses), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], next_page_token="abc", ), - vmwareengine.ListNodeTypesResponse( - node_types=[], + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[], next_page_token="def", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), ], next_page_token="ghi", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], ), RuntimeError, ) - async_pager = await client.list_node_types( + async_pager = await client.fetch_network_policy_external_addresses( request={}, ) assert async_pager.next_page_token == "abc" @@ -5071,43 +5311,47 @@ async def test_list_node_types_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, vmwareengine_resources.NodeType) for i in responses) + assert all( + isinstance(i, vmwareengine_resources.ExternalAddress) for i in responses + ) @pytest.mark.asyncio -async def test_list_node_types_async_pages(): +async def test_fetch_network_policy_external_addresses_async_pages(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_node_types), "__call__", new_callable=mock.AsyncMock + type(client.transport.fetch_network_policy_external_addresses), + "__call__", + new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], next_page_token="abc", ), - vmwareengine.ListNodeTypesResponse( - node_types=[], + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[], next_page_token="def", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), ], next_page_token="ghi", ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), ], ), RuntimeError, @@ -5116,7 +5360,7 @@ async def test_list_node_types_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_node_types(request={}) + await client.fetch_network_policy_external_addresses(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -5126,11 +5370,11 @@ async def test_list_node_types_async_pages(): 
@pytest.mark.parametrize( "request_type", [ - vmwareengine.GetNodeTypeRequest, + vmwareengine.GetExternalAddressRequest, dict, ], ) -def test_get_node_type(request_type, transport: str = "grpc"): +def test_get_external_address(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5141,38 +5385,36 @@ def test_get_node_type(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.NodeType( + call.return_value = vmwareengine_resources.ExternalAddress( name="name_value", - node_type_id="node_type_id_value", - display_name="display_name_value", - virtual_cpu_count=1846, - total_core_count=1716, - memory_gb=961, - disk_size_gb=1261, - available_custom_core_counts=[2974], + internal_ip="internal_ip_value", + external_ip="external_ip_value", + state=vmwareengine_resources.ExternalAddress.State.ACTIVE, + uid="uid_value", + description="description_value", ) - response = client.get_node_type(request) + response = client.get_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetNodeTypeRequest() + assert args[0] == vmwareengine.GetExternalAddressRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.NodeType) + assert isinstance(response, vmwareengine_resources.ExternalAddress) assert response.name == "name_value" - assert response.node_type_id == "node_type_id_value" - assert response.display_name == "display_name_value" - assert response.virtual_cpu_count == 1846 - assert response.total_core_count == 1716 - assert response.memory_gb == 961 - assert response.disk_size_gb == 1261 - assert response.available_custom_core_counts == [2974] + assert response.internal_ip == "internal_ip_value" + assert response.external_ip == "external_ip_value" + assert response.state == vmwareengine_resources.ExternalAddress.State.ACTIVE + assert response.uid == "uid_value" + assert response.description == "description_value" -def test_get_node_type_empty_call(): +def test_get_external_address_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -5181,16 +5423,18 @@ def test_get_node_type_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: - client.get_node_type() + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: + client.get_external_address() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetNodeTypeRequest() + assert args[0] == vmwareengine.GetExternalAddressRequest() @pytest.mark.asyncio -async def test_get_node_type_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.GetNodeTypeRequest +async def test_get_external_address_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetExternalAddressRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5202,59 +5446,59 @@ async def test_get_node_type_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.NodeType( + vmwareengine_resources.ExternalAddress( name="name_value", - node_type_id="node_type_id_value", - display_name="display_name_value", - virtual_cpu_count=1846, - total_core_count=1716, - memory_gb=961, - disk_size_gb=1261, - available_custom_core_counts=[2974], + internal_ip="internal_ip_value", + external_ip="external_ip_value", + state=vmwareengine_resources.ExternalAddress.State.ACTIVE, + uid="uid_value", + description="description_value", ) ) - response = await client.get_node_type(request) + response = await client.get_external_address(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetNodeTypeRequest() + assert args[0] == vmwareengine.GetExternalAddressRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.NodeType) + assert isinstance(response, vmwareengine_resources.ExternalAddress) assert response.name == "name_value" - assert response.node_type_id == "node_type_id_value" - assert response.display_name == "display_name_value" - assert response.virtual_cpu_count == 1846 - assert response.total_core_count == 1716 - assert response.memory_gb == 961 - assert response.disk_size_gb == 1261 - assert response.available_custom_core_counts == [2974] + assert response.internal_ip == "internal_ip_value" + assert response.external_ip == "external_ip_value" + assert response.state == vmwareengine_resources.ExternalAddress.State.ACTIVE + assert response.uid == "uid_value" + assert response.description == "description_value" @pytest.mark.asyncio -async def test_get_node_type_async_from_dict(): - await test_get_node_type_async(request_type=dict) +async def test_get_external_address_async_from_dict(): + await test_get_external_address_async(request_type=dict) -def test_get_node_type_field_headers(): +def test_get_external_address_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetNodeTypeRequest() + request = vmwareengine.GetExternalAddressRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: - call.return_value = vmwareengine_resources.NodeType() - client.get_node_type(request) + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: + call.return_value = vmwareengine_resources.ExternalAddress() + client.get_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5270,23 +5514,25 @@ def test_get_node_type_field_headers(): @pytest.mark.asyncio -async def test_get_node_type_field_headers_async(): +async def test_get_external_address_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetNodeTypeRequest() + request = vmwareengine.GetExternalAddressRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.NodeType() + vmwareengine_resources.ExternalAddress() ) - await client.get_node_type(request) + await client.get_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5301,18 +5547,20 @@ async def test_get_node_type_field_headers_async(): ) in kw["metadata"] -def test_get_node_type_flattened(): +def test_get_external_address_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.NodeType() + call.return_value = vmwareengine_resources.ExternalAddress() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_node_type( + client.get_external_address( name="name_value", ) @@ -5325,7 +5573,7 @@ def test_get_node_type_flattened(): assert arg == mock_val -def test_get_node_type_flattened_error(): +def test_get_external_address_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5333,29 +5581,31 @@ def test_get_node_type_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_node_type( - vmwareengine.GetNodeTypeRequest(), + client.get_external_address( + vmwareengine.GetExternalAddressRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_node_type_flattened_async(): +async def test_get_external_address_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + with mock.patch.object( + type(client.transport.get_external_address), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine_resources.NodeType() + call.return_value = vmwareengine_resources.ExternalAddress() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.NodeType() + vmwareengine_resources.ExternalAddress() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_node_type( + response = await client.get_external_address( name="name_value", ) @@ -5369,7 +5619,7 @@ async def test_get_node_type_flattened_async(): @pytest.mark.asyncio -async def test_get_node_type_flattened_error_async(): +async def test_get_external_address_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5377,8 +5627,8 @@ async def test_get_node_type_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_node_type( - vmwareengine.GetNodeTypeRequest(), + await client.get_external_address( + vmwareengine.GetExternalAddressRequest(), name="name_value", ) @@ -5386,11 +5636,11 @@ async def test_get_node_type_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ShowNsxCredentialsRequest, + vmwareengine.CreateExternalAddressRequest, dict, ], ) -def test_show_nsx_credentials(request_type, transport: str = "grpc"): +def test_create_external_address(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5402,27 +5652,22 @@ def test_show_nsx_credentials(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine_resources.Credentials( - username="username_value", - password="password_value", - ) - response = client.show_nsx_credentials(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ShowNsxCredentialsRequest() + assert args[0] == vmwareengine.CreateExternalAddressRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.Credentials) - assert response.username == "username_value" - assert response.password == "password_value" + assert isinstance(response, future.Future) -def test_show_nsx_credentials_empty_call(): +def test_create_external_address_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -5432,17 +5677,18 @@ def test_show_nsx_credentials_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: - client.show_nsx_credentials() + client.create_external_address() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ShowNsxCredentialsRequest() + assert args[0] == vmwareengine.CreateExternalAddressRequest() @pytest.mark.asyncio -async def test_show_nsx_credentials_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.ShowNsxCredentialsRequest +async def test_create_external_address_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateExternalAddressRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5455,50 +5701,45 @@ async def test_show_nsx_credentials_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Credentials( - username="username_value", - password="password_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.show_nsx_credentials(request) + response = await client.create_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ShowNsxCredentialsRequest() + assert args[0] == vmwareengine.CreateExternalAddressRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Credentials) - assert response.username == "username_value" - assert response.password == "password_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_show_nsx_credentials_async_from_dict(): - await test_show_nsx_credentials_async(request_type=dict) +async def test_create_external_address_async_from_dict(): + await test_create_external_address_async(request_type=dict) -def test_show_nsx_credentials_field_headers(): +def test_create_external_address_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ShowNsxCredentialsRequest() + request = vmwareengine.CreateExternalAddressRequest() - request.private_cloud = "private_cloud_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: - call.return_value = vmwareengine_resources.Credentials() - client.show_nsx_credentials(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_external_address(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5509,30 +5750,30 @@ def test_show_nsx_credentials_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_show_nsx_credentials_field_headers_async(): +async def test_create_external_address_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ShowNsxCredentialsRequest() + request = vmwareengine.CreateExternalAddressRequest() - request.private_cloud = "private_cloud_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Credentials() + operations_pb2.Operation(name="operations/op") ) - await client.show_nsx_credentials(request) + await client.create_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5543,37 +5784,45 @@ async def test_show_nsx_credentials_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "parent=parent_value", ) in kw["metadata"] -def test_show_nsx_credentials_flattened(): +def test_create_external_address_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Credentials() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.show_nsx_credentials( - private_cloud="private_cloud_value", + client.create_external_address( + parent="parent_value", + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + external_address_id="external_address_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].external_address + mock_val = vmwareengine_resources.ExternalAddress(name="name_value") + assert arg == mock_val + arg = args[0].external_address_id + mock_val = "external_address_id_value" assert arg == mock_val -def test_show_nsx_credentials_flattened_error(): +def test_create_external_address_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5581,45 +5830,55 @@ def test_show_nsx_credentials_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.show_nsx_credentials( - vmwareengine.ShowNsxCredentialsRequest(), - private_cloud="private_cloud_value", + client.create_external_address( + vmwareengine.CreateExternalAddressRequest(), + parent="parent_value", + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + external_address_id="external_address_id_value", ) @pytest.mark.asyncio -async def test_show_nsx_credentials_flattened_async(): +async def test_create_external_address_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_nsx_credentials), "__call__" + type(client.transport.create_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Credentials() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Credentials() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.show_nsx_credentials( - private_cloud="private_cloud_value", + response = await client.create_external_address( + parent="parent_value", + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + external_address_id="external_address_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].external_address + mock_val = vmwareengine_resources.ExternalAddress(name="name_value") + assert arg == mock_val + arg = args[0].external_address_id + mock_val = "external_address_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_show_nsx_credentials_flattened_error_async(): +async def test_create_external_address_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5627,20 +5886,22 @@ async def test_show_nsx_credentials_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.show_nsx_credentials( - vmwareengine.ShowNsxCredentialsRequest(), - private_cloud="private_cloud_value", + await client.create_external_address( + vmwareengine.CreateExternalAddressRequest(), + parent="parent_value", + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + external_address_id="external_address_id_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ShowVcenterCredentialsRequest, + vmwareengine.UpdateExternalAddressRequest, dict, ], ) -def test_show_vcenter_credentials(request_type, transport: str = "grpc"): +def test_update_external_address(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5652,27 +5913,22 @@ def test_show_vcenter_credentials(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Credentials( - username="username_value", - password="password_value", - ) - response = client.show_vcenter_credentials(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() + assert args[0] == vmwareengine.UpdateExternalAddressRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.Credentials) - assert response.username == "username_value" - assert response.password == "password_value" + assert isinstance(response, future.Future) -def test_show_vcenter_credentials_empty_call(): +def test_update_external_address_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -5682,18 +5938,18 @@ def test_show_vcenter_credentials_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: - client.show_vcenter_credentials() + client.update_external_address() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() + assert args[0] == vmwareengine.UpdateExternalAddressRequest() @pytest.mark.asyncio -async def test_show_vcenter_credentials_async( +async def test_update_external_address_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.ShowVcenterCredentialsRequest, + request_type=vmwareengine.UpdateExternalAddressRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5706,50 +5962,45 @@ async def test_show_vcenter_credentials_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Credentials( - username="username_value", - password="password_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.show_vcenter_credentials(request) + response = await client.update_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() + assert args[0] == vmwareengine.UpdateExternalAddressRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Credentials) - assert response.username == "username_value" - assert response.password == "password_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_show_vcenter_credentials_async_from_dict(): - await test_show_vcenter_credentials_async(request_type=dict) +async def test_update_external_address_async_from_dict(): + await test_update_external_address_async(request_type=dict) -def test_show_vcenter_credentials_field_headers(): +def test_update_external_address_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ShowVcenterCredentialsRequest() + request = vmwareengine.UpdateExternalAddressRequest() - request.private_cloud = "private_cloud_value" + request.external_address.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: - call.return_value = vmwareengine_resources.Credentials() - client.show_vcenter_credentials(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_external_address(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5760,30 +6011,30 @@ def test_show_vcenter_credentials_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "external_address.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_show_vcenter_credentials_field_headers_async(): +async def test_update_external_address_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ShowVcenterCredentialsRequest() + request = vmwareengine.UpdateExternalAddressRequest() - request.private_cloud = "private_cloud_value" + request.external_address.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Credentials() + operations_pb2.Operation(name="operations/op") ) - await client.show_vcenter_credentials(request) + await client.update_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5794,37 +6045,41 @@ async def test_show_vcenter_credentials_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "external_address.name=name_value", ) in kw["metadata"] -def test_show_vcenter_credentials_flattened(): +def test_update_external_address_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Credentials() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.show_vcenter_credentials( - private_cloud="private_cloud_value", + client.update_external_address( + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].external_address + mock_val = vmwareengine_resources.ExternalAddress(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_show_vcenter_credentials_flattened_error(): +def test_update_external_address_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5832,45 +6087,50 @@ def test_show_vcenter_credentials_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.show_vcenter_credentials( - vmwareengine.ShowVcenterCredentialsRequest(), - private_cloud="private_cloud_value", + client.update_external_address( + vmwareengine.UpdateExternalAddressRequest(), + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_show_vcenter_credentials_flattened_async(): +async def test_update_external_address_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.show_vcenter_credentials), "__call__" + type(client.transport.update_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.Credentials() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.Credentials() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.show_vcenter_credentials( - private_cloud="private_cloud_value", + response = await client.update_external_address( + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].external_address + mock_val = vmwareengine_resources.ExternalAddress(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_show_vcenter_credentials_flattened_error_async(): +async def test_update_external_address_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5878,20 +6138,21 @@ async def test_show_vcenter_credentials_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.show_vcenter_credentials( - vmwareengine.ShowVcenterCredentialsRequest(), - private_cloud="private_cloud_value", + await client.update_external_address( + vmwareengine.UpdateExternalAddressRequest(), + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ResetNsxCredentialsRequest, + vmwareengine.DeleteExternalAddressRequest, dict, ], ) -def test_reset_nsx_credentials(request_type, transport: str = "grpc"): +def test_delete_external_address(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5903,22 +6164,22 @@ def test_reset_nsx_credentials(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.reset_nsx_credentials(request) + response = client.delete_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ResetNsxCredentialsRequest() + assert args[0] == vmwareengine.DeleteExternalAddressRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_reset_nsx_credentials_empty_call(): +def test_delete_external_address_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -5928,18 +6189,18 @@ def test_reset_nsx_credentials_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: - client.reset_nsx_credentials() + client.delete_external_address() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ResetNsxCredentialsRequest() + assert args[0] == vmwareengine.DeleteExternalAddressRequest() @pytest.mark.asyncio -async def test_reset_nsx_credentials_async( +async def test_delete_external_address_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.ResetNsxCredentialsRequest, + request_type=vmwareengine.DeleteExternalAddressRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5952,45 +6213,45 @@ async def test_reset_nsx_credentials_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.reset_nsx_credentials(request) + response = await client.delete_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ResetNsxCredentialsRequest() + assert args[0] == vmwareengine.DeleteExternalAddressRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_reset_nsx_credentials_async_from_dict(): - await test_reset_nsx_credentials_async(request_type=dict) +async def test_delete_external_address_async_from_dict(): + await test_delete_external_address_async(request_type=dict) -def test_reset_nsx_credentials_field_headers(): +def test_delete_external_address_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ResetNsxCredentialsRequest() + request = vmwareengine.DeleteExternalAddressRequest() - request.private_cloud = "private_cloud_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.reset_nsx_credentials(request) + client.delete_external_address(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6001,30 +6262,30 @@ def test_reset_nsx_credentials_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_reset_nsx_credentials_field_headers_async(): +async def test_delete_external_address_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ResetNsxCredentialsRequest() + request = vmwareengine.DeleteExternalAddressRequest() - request.private_cloud = "private_cloud_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.reset_nsx_credentials(request) + await client.delete_external_address(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6035,37 +6296,37 @@ async def test_reset_nsx_credentials_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "name=name_value", ) in kw["metadata"] -def test_reset_nsx_credentials_flattened(): +def test_delete_external_address_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.reset_nsx_credentials( - private_cloud="private_cloud_value", + client.delete_external_address( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_reset_nsx_credentials_flattened_error(): +def test_delete_external_address_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6073,21 +6334,21 @@ def test_reset_nsx_credentials_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.reset_nsx_credentials( - vmwareengine.ResetNsxCredentialsRequest(), - private_cloud="private_cloud_value", + client.delete_external_address( + vmwareengine.DeleteExternalAddressRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_reset_nsx_credentials_flattened_async(): +async def test_delete_external_address_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reset_nsx_credentials), "__call__" + type(client.transport.delete_external_address), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -6097,21 +6358,21 @@ async def test_reset_nsx_credentials_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.reset_nsx_credentials( - private_cloud="private_cloud_value", + response = await client.delete_external_address( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_reset_nsx_credentials_flattened_error_async(): +async def test_delete_external_address_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6119,20 +6380,20 @@ async def test_reset_nsx_credentials_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.reset_nsx_credentials( - vmwareengine.ResetNsxCredentialsRequest(), - private_cloud="private_cloud_value", + await client.delete_external_address( + vmwareengine.DeleteExternalAddressRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ResetVcenterCredentialsRequest, + vmwareengine.ListSubnetsRequest, dict, ], ) -def test_reset_vcenter_credentials(request_type, transport: str = "grpc"): +def test_list_subnets(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6143,23 +6404,26 @@ def test_reset_vcenter_credentials(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.reset_vcenter_credentials(request) + call.return_value = vmwareengine.ListSubnetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_subnets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ResetVcenterCredentialsRequest() + assert args[0] == vmwareengine.ListSubnetsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListSubnetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_reset_vcenter_credentials_empty_call(): +def test_list_subnets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -6168,19 +6432,16 @@ def test_reset_vcenter_credentials_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: - client.reset_vcenter_credentials() + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + client.list_subnets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ResetVcenterCredentialsRequest() + assert args[0] == vmwareengine.ListSubnetsRequest() @pytest.mark.asyncio -async def test_reset_vcenter_credentials_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.ResetVcenterCredentialsRequest, +async def test_list_subnets_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.ListSubnetsRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6192,46 +6453,47 @@ async def test_reset_vcenter_credentials_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine.ListSubnetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.reset_vcenter_credentials(request) + response = await client.list_subnets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ResetVcenterCredentialsRequest() + assert args[0] == vmwareengine.ListSubnetsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListSubnetsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_reset_vcenter_credentials_async_from_dict(): - await test_reset_vcenter_credentials_async(request_type=dict) +async def test_list_subnets_async_from_dict(): + await test_list_subnets_async(request_type=dict) -def test_reset_vcenter_credentials_field_headers(): +def test_list_subnets_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ResetVcenterCredentialsRequest() + request = vmwareengine.ListSubnetsRequest() - request.private_cloud = "private_cloud_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.reset_vcenter_credentials(request) + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + call.return_value = vmwareengine.ListSubnetsResponse() + client.list_subnets(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6242,30 +6504,28 @@ def test_reset_vcenter_credentials_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_reset_vcenter_credentials_field_headers_async(): +async def test_list_subnets_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ResetVcenterCredentialsRequest() + request = vmwareengine.ListSubnetsRequest() - request.private_cloud = "private_cloud_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine.ListSubnetsResponse() ) - await client.reset_vcenter_credentials(request) + await client.list_subnets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6276,37 +6536,35 @@ async def test_reset_vcenter_credentials_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_cloud=private_cloud_value", + "parent=parent_value", ) in kw["metadata"] -def test_reset_vcenter_credentials_flattened(): +def test_list_subnets_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine.ListSubnetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.reset_vcenter_credentials( - private_cloud="private_cloud_value", + client.list_subnets( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_reset_vcenter_credentials_flattened_error(): +def test_list_subnets_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6314,45 +6572,43 @@ def test_reset_vcenter_credentials_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.reset_vcenter_credentials( - vmwareengine.ResetVcenterCredentialsRequest(), - private_cloud="private_cloud_value", + client.list_subnets( + vmwareengine.ListSubnetsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_reset_vcenter_credentials_flattened_async(): +async def test_list_subnets_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reset_vcenter_credentials), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine.ListSubnetsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine.ListSubnetsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.reset_vcenter_credentials( - private_cloud="private_cloud_value", + response = await client.list_subnets( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].private_cloud - mock_val = "private_cloud_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_reset_vcenter_credentials_flattened_error_async(): +async def test_list_subnets_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6360,20 +6616,210 @@ async def test_reset_vcenter_credentials_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.reset_vcenter_credentials( - vmwareengine.ResetVcenterCredentialsRequest(), - private_cloud="private_cloud_value", + await client.list_subnets( + vmwareengine.ListSubnetsRequest(), + parent="parent_value", + ) + + +def test_list_subnets_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + next_page_token="abc", + ), + vmwareengine.ListSubnetsResponse( + subnets=[], + next_page_token="def", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + ], + next_page_token="ghi", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_subnets(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.Subnet) for i in results) + + +def test_list_subnets_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_subnets), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + next_page_token="abc", + ), + vmwareengine.ListSubnetsResponse( + subnets=[], + next_page_token="def", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + ], + next_page_token="ghi", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + ), + RuntimeError, + ) + pages = list(client.list_subnets(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_subnets_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subnets), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + next_page_token="abc", + ), + vmwareengine.ListSubnetsResponse( + subnets=[], + next_page_token="def", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + ], + next_page_token="ghi", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_subnets( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, vmwareengine_resources.Subnet) for i in responses) + + +@pytest.mark.asyncio +async def test_list_subnets_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subnets), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + next_page_token="abc", + ), + vmwareengine.ListSubnetsResponse( + subnets=[], + next_page_token="def", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + ], + next_page_token="ghi", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_subnets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreateHcxActivationKeyRequest, + vmwareengine.GetSubnetRequest, dict, ], ) -def test_create_hcx_activation_key(request_type, transport: str = "grpc"): +def test_get_subnet(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6384,23 +6830,34 @@ def test_create_hcx_activation_key(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_hcx_activation_key(request) + call.return_value = vmwareengine_resources.Subnet( + name="name_value", + ip_cidr_range="ip_cidr_range_value", + gateway_ip="gateway_ip_value", + type_="type__value", + state=vmwareengine_resources.Subnet.State.ACTIVE, + vlan_id=733, + ) + response = client.get_subnet(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateHcxActivationKeyRequest() + assert args[0] == vmwareengine.GetSubnetRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.Subnet) + assert response.name == "name_value" + assert response.ip_cidr_range == "ip_cidr_range_value" + assert response.gateway_ip == "gateway_ip_value" + assert response.type_ == "type__value" + assert response.state == vmwareengine_resources.Subnet.State.ACTIVE + assert response.vlan_id == 733 -def test_create_hcx_activation_key_empty_call(): +def test_get_subnet_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -6409,19 +6866,16 @@ def test_create_hcx_activation_key_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: - client.create_hcx_activation_key() + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + client.get_subnet() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateHcxActivationKeyRequest() + assert args[0] == vmwareengine.GetSubnetRequest() @pytest.mark.asyncio -async def test_create_hcx_activation_key_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.CreateHcxActivationKeyRequest, +async def test_get_subnet_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetSubnetRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6433,46 +6887,55 @@ async def test_create_hcx_activation_key_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.Subnet( + name="name_value", + ip_cidr_range="ip_cidr_range_value", + gateway_ip="gateway_ip_value", + type_="type__value", + state=vmwareengine_resources.Subnet.State.ACTIVE, + vlan_id=733, + ) ) - response = await client.create_hcx_activation_key(request) + response = await client.get_subnet(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateHcxActivationKeyRequest() + assert args[0] == vmwareengine.GetSubnetRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.Subnet) + assert response.name == "name_value" + assert response.ip_cidr_range == "ip_cidr_range_value" + assert response.gateway_ip == "gateway_ip_value" + assert response.type_ == "type__value" + assert response.state == vmwareengine_resources.Subnet.State.ACTIVE + assert response.vlan_id == 733 @pytest.mark.asyncio -async def test_create_hcx_activation_key_async_from_dict(): - await test_create_hcx_activation_key_async(request_type=dict) +async def test_get_subnet_async_from_dict(): + await test_get_subnet_async(request_type=dict) -def test_create_hcx_activation_key_field_headers(): +def test_get_subnet_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreateHcxActivationKeyRequest() + request = vmwareengine.GetSubnetRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_hcx_activation_key(request) + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: + call.return_value = vmwareengine_resources.Subnet() + client.get_subnet(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6483,30 +6946,28 @@ def test_create_hcx_activation_key_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_hcx_activation_key_field_headers_async(): +async def test_get_subnet_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreateHcxActivationKeyRequest() + request = vmwareengine.GetSubnetRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine_resources.Subnet() ) - await client.create_hcx_activation_key(request) + await client.get_subnet(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6517,47 +6978,35 @@ async def test_create_hcx_activation_key_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_hcx_activation_key_flattened(): +def test_get_subnet_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.Subnet() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_hcx_activation_key( - parent="parent_value", - hcx_activation_key=vmwareengine_resources.HcxActivationKey( - name="name_value" - ), - hcx_activation_key_id="hcx_activation_key_id_value", + client.get_subnet( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].hcx_activation_key - mock_val = vmwareengine_resources.HcxActivationKey(name="name_value") - assert arg == mock_val - arg = args[0].hcx_activation_key_id - mock_val = "hcx_activation_key_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_hcx_activation_key_flattened_error(): +def test_get_subnet_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6565,59 +7014,43 @@ def test_create_hcx_activation_key_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_hcx_activation_key( - vmwareengine.CreateHcxActivationKeyRequest(), - parent="parent_value", - hcx_activation_key=vmwareengine_resources.HcxActivationKey( - name="name_value" - ), - hcx_activation_key_id="hcx_activation_key_id_value", + client.get_subnet( + vmwareengine.GetSubnetRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_hcx_activation_key_flattened_async(): +async def test_get_subnet_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_hcx_activation_key), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subnet), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.Subnet() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.Subnet() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_hcx_activation_key( - parent="parent_value", - hcx_activation_key=vmwareengine_resources.HcxActivationKey( - name="name_value" - ), - hcx_activation_key_id="hcx_activation_key_id_value", + response = await client.get_subnet( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].hcx_activation_key - mock_val = vmwareengine_resources.HcxActivationKey(name="name_value") - assert arg == mock_val - arg = args[0].hcx_activation_key_id - mock_val = "hcx_activation_key_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_hcx_activation_key_flattened_error_async(): +async def test_get_subnet_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6625,24 +7058,20 @@ async def test_create_hcx_activation_key_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_hcx_activation_key( - vmwareengine.CreateHcxActivationKeyRequest(), - parent="parent_value", - hcx_activation_key=vmwareengine_resources.HcxActivationKey( - name="name_value" - ), - hcx_activation_key_id="hcx_activation_key_id_value", + await client.get_subnet( + vmwareengine.GetSubnetRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListHcxActivationKeysRequest, + vmwareengine.UpdateSubnetRequest, dict, ], ) -def test_list_hcx_activation_keys(request_type, transport: str = "grpc"): +def test_update_subnet(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6653,28 +7082,21 @@ def test_list_hcx_activation_keys(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListHcxActivationKeysResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_hcx_activation_keys(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_subnet(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListHcxActivationKeysRequest() + assert args[0] == vmwareengine.UpdateSubnetRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListHcxActivationKeysPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_hcx_activation_keys_empty_call(): +def test_update_subnet_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -6683,19 +7105,16 @@ def test_list_hcx_activation_keys_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" - ) as call: - client.list_hcx_activation_keys() + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + client.update_subnet() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListHcxActivationKeysRequest() + assert args[0] == vmwareengine.UpdateSubnetRequest() @pytest.mark.asyncio -async def test_list_hcx_activation_keys_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.ListHcxActivationKeysRequest, +async def test_update_subnet_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.UpdateSubnetRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6707,51 +7126,42 @@ async def test_list_hcx_activation_keys_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListHcxActivationKeysResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_hcx_activation_keys(request) + response = await client.update_subnet(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListHcxActivationKeysRequest() + assert args[0] == vmwareengine.UpdateSubnetRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHcxActivationKeysAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_hcx_activation_keys_async_from_dict(): - await test_list_hcx_activation_keys_async(request_type=dict) +async def test_update_subnet_async_from_dict(): + await test_update_subnet_async(request_type=dict) -def test_list_hcx_activation_keys_field_headers(): +def test_update_subnet_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListHcxActivationKeysRequest() + request = vmwareengine.UpdateSubnetRequest() - request.parent = "parent_value" + request.subnet.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" - ) as call: - call.return_value = vmwareengine.ListHcxActivationKeysResponse() - client.list_hcx_activation_keys(request) + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_subnet(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6762,30 +7172,28 @@ def test_list_hcx_activation_keys_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "subnet.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_hcx_activation_keys_field_headers_async(): +async def test_update_subnet_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListHcxActivationKeysRequest() + request = vmwareengine.UpdateSubnetRequest() - request.parent = "parent_value" + request.subnet.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListHcxActivationKeysResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_hcx_activation_keys(request) + await client.update_subnet(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -6796,37 +7204,39 @@ async def test_list_hcx_activation_keys_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "subnet.name=name_value", ) in kw["metadata"] -def test_list_hcx_activation_keys_flattened(): +def test_update_subnet_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListHcxActivationKeysResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_hcx_activation_keys( - parent="parent_value", + client.update_subnet( + subnet=vmwareengine_resources.Subnet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].subnet + mock_val = vmwareengine_resources.Subnet(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_hcx_activation_keys_flattened_error(): +def test_update_subnet_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6834,31 +7244,286 @@ def test_list_hcx_activation_keys_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_hcx_activation_keys( - vmwareengine.ListHcxActivationKeysRequest(), + client.update_subnet( + vmwareengine.UpdateSubnetRequest(), + subnet=vmwareengine_resources.Subnet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_subnet_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_subnet), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_subnet( + subnet=vmwareengine_resources.Subnet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subnet + mock_val = vmwareengine_resources.Subnet(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_subnet_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_subnet( + vmwareengine.UpdateSubnetRequest(), + subnet=vmwareengine_resources.Subnet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListExternalAccessRulesRequest, + dict, + ], +) +def test_list_external_access_rules(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_access_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListExternalAccessRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_external_access_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListExternalAccessRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExternalAccessRulesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_external_access_rules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_external_access_rules), "__call__" + ) as call: + client.list_external_access_rules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListExternalAccessRulesRequest() + + +@pytest.mark.asyncio +async def test_list_external_access_rules_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListExternalAccessRulesRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_access_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListExternalAccessRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_external_access_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListExternalAccessRulesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListExternalAccessRulesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_external_access_rules_async_from_dict(): + await test_list_external_access_rules_async(request_type=dict) + + +def test_list_external_access_rules_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListExternalAccessRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_access_rules), "__call__" + ) as call: + call.return_value = vmwareengine.ListExternalAccessRulesResponse() + client.list_external_access_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_external_access_rules_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListExternalAccessRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_external_access_rules), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListExternalAccessRulesResponse() + ) + await client.list_external_access_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_external_access_rules_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_external_access_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListExternalAccessRulesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_external_access_rules( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_external_access_rules_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_external_access_rules( + vmwareengine.ListExternalAccessRulesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_hcx_activation_keys_flattened_async(): +async def test_list_external_access_rules_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" + type(client.transport.list_external_access_rules), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListHcxActivationKeysResponse() + call.return_value = vmwareengine.ListExternalAccessRulesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListHcxActivationKeysResponse() + vmwareengine.ListExternalAccessRulesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_hcx_activation_keys( + response = await client.list_external_access_rules( parent="parent_value", ) @@ -6872,7 +7537,7 @@ async def test_list_hcx_activation_keys_flattened_async(): @pytest.mark.asyncio -async def test_list_hcx_activation_keys_flattened_error_async(): +async def test_list_external_access_rules_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6880,13 +7545,13 @@ async def test_list_hcx_activation_keys_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_hcx_activation_keys( - vmwareengine.ListHcxActivationKeysRequest(), + await client.list_external_access_rules( + vmwareengine.ListExternalAccessRulesRequest(), parent="parent_value", ) -def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): +def test_list_external_access_rules_pager(transport_name: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, @@ -6894,32 +7559,32 @@ def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" + type(client.transport.list_external_access_rules), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="abc", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[], + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[], next_page_token="def", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="ghi", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + 
vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], ), RuntimeError, @@ -6929,18 +7594,18 @@ def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_hcx_activation_keys(request={}) + pager = client.list_external_access_rules(request={}) assert pager._metadata == metadata results = list(pager) assert len(results) == 6 assert all( - isinstance(i, vmwareengine_resources.HcxActivationKey) for i in results + isinstance(i, vmwareengine_resources.ExternalAccessRule) for i in results ) -def test_list_hcx_activation_keys_pages(transport_name: str = "grpc"): +def test_list_external_access_rules_pages(transport_name: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, @@ -6948,82 +7613,82 @@ def test_list_hcx_activation_keys_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_hcx_activation_keys), "__call__" + type(client.transport.list_external_access_rules), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="abc", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[], + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[], next_page_token="def", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="ghi", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], ), RuntimeError, ) - pages = list(client.list_hcx_activation_keys(request={}).pages) + pages = list(client.list_external_access_rules(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_hcx_activation_keys_async_pager(): +async def test_list_external_access_rules_async_pager(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_hcx_activation_keys), + type(client.transport.list_external_access_rules), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="abc", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[], + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[], next_page_token="def", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="ghi", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], ), RuntimeError, ) - async_pager = await client.list_hcx_activation_keys( + async_pager = await client.list_external_access_rules( request={}, ) assert async_pager.next_page_token == "abc" @@ -7033,46 +7698,46 @@ async def test_list_hcx_activation_keys_async_pager(): assert len(responses) == 6 assert all( - isinstance(i, vmwareengine_resources.HcxActivationKey) for i in responses + isinstance(i, vmwareengine_resources.ExternalAccessRule) for i in responses ) @pytest.mark.asyncio -async def 
test_list_hcx_activation_keys_async_pages(): +async def test_list_external_access_rules_async_pages(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_hcx_activation_keys), + type(client.transport.list_external_access_rules), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="abc", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[], + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[], next_page_token="def", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), ], next_page_token="ghi", ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), ], ), RuntimeError, @@ -7081,7 +7746,7 @@ async def test_list_hcx_activation_keys_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async 
for page_ in ( # pragma: no branch - await client.list_hcx_activation_keys(request={}) + await client.list_external_access_rules(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -7091,11 +7756,11 @@ async def test_list_hcx_activation_keys_async_pages(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetHcxActivationKeyRequest, + vmwareengine.GetExternalAccessRuleRequest, dict, ], ) -def test_get_hcx_activation_key(request_type, transport: str = "grpc"): +def test_get_external_access_rule(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7107,31 +7772,41 @@ def test_get_hcx_activation_key(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.HcxActivationKey( + call.return_value = vmwareengine_resources.ExternalAccessRule( name="name_value", - state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, - activation_key="activation_key_value", + description="description_value", + priority=898, + action=vmwareengine_resources.ExternalAccessRule.Action.ALLOW, + ip_protocol="ip_protocol_value", + source_ports=["source_ports_value"], + destination_ports=["destination_ports_value"], + state=vmwareengine_resources.ExternalAccessRule.State.ACTIVE, uid="uid_value", ) - response = client.get_hcx_activation_key(request) + response = client.get_external_access_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetHcxActivationKeyRequest() + assert args[0] == vmwareengine.GetExternalAccessRuleRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.HcxActivationKey) + assert isinstance(response, vmwareengine_resources.ExternalAccessRule) assert response.name == "name_value" - assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE - assert response.activation_key == "activation_key_value" + assert response.description == "description_value" + assert response.priority == 898 + assert response.action == vmwareengine_resources.ExternalAccessRule.Action.ALLOW + assert response.ip_protocol == "ip_protocol_value" + assert response.source_ports == ["source_ports_value"] + assert response.destination_ports == ["destination_ports_value"] + assert response.state == vmwareengine_resources.ExternalAccessRule.State.ACTIVE assert response.uid == "uid_value" -def test_get_hcx_activation_key_empty_call(): +def test_get_external_access_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -7141,18 +7816,18 @@ def test_get_hcx_activation_key_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: - client.get_hcx_activation_key() + client.get_external_access_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetHcxActivationKeyRequest() + assert args[0] == vmwareengine.GetExternalAccessRuleRequest() @pytest.mark.asyncio -async def test_get_hcx_activation_key_async( +async def test_get_external_access_rule_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.GetHcxActivationKeyRequest, + request_type=vmwareengine.GetExternalAccessRuleRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7165,54 +7840,64 @@ async def test_get_hcx_activation_key_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.HcxActivationKey( + vmwareengine_resources.ExternalAccessRule( name="name_value", - state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, - activation_key="activation_key_value", + description="description_value", + priority=898, + action=vmwareengine_resources.ExternalAccessRule.Action.ALLOW, + ip_protocol="ip_protocol_value", + source_ports=["source_ports_value"], + destination_ports=["destination_ports_value"], + state=vmwareengine_resources.ExternalAccessRule.State.ACTIVE, uid="uid_value", ) ) - response = await client.get_hcx_activation_key(request) + response = await client.get_external_access_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetHcxActivationKeyRequest() + assert args[0] == vmwareengine.GetExternalAccessRuleRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.HcxActivationKey) + assert isinstance(response, vmwareengine_resources.ExternalAccessRule) assert response.name == "name_value" - assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE - assert response.activation_key == "activation_key_value" + assert response.description == "description_value" + assert response.priority == 898 + assert response.action == vmwareengine_resources.ExternalAccessRule.Action.ALLOW + assert response.ip_protocol == "ip_protocol_value" + assert response.source_ports == ["source_ports_value"] + assert response.destination_ports == ["destination_ports_value"] + assert response.state == vmwareengine_resources.ExternalAccessRule.State.ACTIVE assert response.uid == "uid_value" @pytest.mark.asyncio -async def test_get_hcx_activation_key_async_from_dict(): - await test_get_hcx_activation_key_async(request_type=dict) +async def test_get_external_access_rule_async_from_dict(): + await test_get_external_access_rule_async(request_type=dict) -def test_get_hcx_activation_key_field_headers(): +def test_get_external_access_rule_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetHcxActivationKeyRequest() + request = vmwareengine.GetExternalAccessRuleRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: - call.return_value = vmwareengine_resources.HcxActivationKey() - client.get_hcx_activation_key(request) + call.return_value = vmwareengine_resources.ExternalAccessRule() + client.get_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7228,25 +7913,25 @@ def test_get_hcx_activation_key_field_headers(): @pytest.mark.asyncio -async def test_get_hcx_activation_key_field_headers_async(): +async def test_get_external_access_rule_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetHcxActivationKeyRequest() + request = vmwareengine.GetExternalAccessRuleRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.HcxActivationKey() + vmwareengine_resources.ExternalAccessRule() ) - await client.get_hcx_activation_key(request) + await client.get_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7261,20 +7946,20 @@ async def test_get_hcx_activation_key_field_headers_async(): ) in kw["metadata"] -def test_get_hcx_activation_key_flattened(): +def test_get_external_access_rule_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.HcxActivationKey() + call.return_value = vmwareengine_resources.ExternalAccessRule() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_hcx_activation_key( + client.get_external_access_rule( name="name_value", ) @@ -7287,7 +7972,7 @@ def test_get_hcx_activation_key_flattened(): assert arg == mock_val -def test_get_hcx_activation_key_flattened_error(): +def test_get_external_access_rule_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7295,31 +7980,31 @@ def test_get_hcx_activation_key_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_hcx_activation_key( - vmwareengine.GetHcxActivationKeyRequest(), + client.get_external_access_rule( + vmwareengine.GetExternalAccessRuleRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_hcx_activation_key_flattened_async(): +async def test_get_external_access_rule_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_hcx_activation_key), "__call__" + type(client.transport.get_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine_resources.HcxActivationKey() + call.return_value = vmwareengine_resources.ExternalAccessRule() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.HcxActivationKey() + vmwareengine_resources.ExternalAccessRule() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_hcx_activation_key( + response = await client.get_external_access_rule( name="name_value", ) @@ -7333,7 +8018,7 @@ async def test_get_hcx_activation_key_flattened_async(): @pytest.mark.asyncio -async def test_get_hcx_activation_key_flattened_error_async(): +async def test_get_external_access_rule_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7341,8 +8026,8 @@ async def test_get_hcx_activation_key_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_hcx_activation_key( - vmwareengine.GetHcxActivationKeyRequest(), + await client.get_external_access_rule( + vmwareengine.GetExternalAccessRuleRequest(), name="name_value", ) @@ -7350,11 +8035,11 @@ async def test_get_hcx_activation_key_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetNetworkPolicyRequest, + vmwareengine.CreateExternalAccessRuleRequest, dict, ], ) -def test_get_network_policy(request_type, transport: str = "grpc"): +def test_create_external_access_rule(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7366,38 +8051,22 @@ def test_get_network_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.NetworkPolicy( - name="name_value", - edge_services_cidr="edge_services_cidr_value", - uid="uid_value", - vmware_engine_network="vmware_engine_network_value", - description="description_value", - vmware_engine_network_canonical="vmware_engine_network_canonical_value", - ) - response = client.get_network_policy(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetNetworkPolicyRequest() + assert args[0] == vmwareengine.CreateExternalAccessRuleRequest() # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.NetworkPolicy) - assert response.name == "name_value" - assert response.edge_services_cidr == "edge_services_cidr_value" - assert response.uid == "uid_value" - assert response.vmware_engine_network == "vmware_engine_network_value" - assert response.description == "description_value" - assert ( - response.vmware_engine_network_canonical - == "vmware_engine_network_canonical_value" - ) + assert isinstance(response, future.Future) -def test_get_network_policy_empty_call(): +def test_create_external_access_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -7407,17 +8076,18 @@ def test_get_network_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: - client.get_network_policy() + client.create_external_access_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetNetworkPolicyRequest() + assert args[0] == vmwareengine.CreateExternalAccessRuleRequest() @pytest.mark.asyncio -async def test_get_network_policy_async( - transport: str = "grpc_asyncio", request_type=vmwareengine.GetNetworkPolicyRequest +async def test_create_external_access_rule_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateExternalAccessRuleRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7430,61 +8100,45 @@ async def test_get_network_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.NetworkPolicy( - name="name_value", - edge_services_cidr="edge_services_cidr_value", - uid="uid_value", - vmware_engine_network="vmware_engine_network_value", - description="description_value", - vmware_engine_network_canonical="vmware_engine_network_canonical_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_network_policy(request) + response = await client.create_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetNetworkPolicyRequest() + assert args[0] == vmwareengine.CreateExternalAccessRuleRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.NetworkPolicy) - assert response.name == "name_value" - assert response.edge_services_cidr == "edge_services_cidr_value" - assert response.uid == "uid_value" - assert response.vmware_engine_network == "vmware_engine_network_value" - assert response.description == "description_value" - assert ( - response.vmware_engine_network_canonical - == "vmware_engine_network_canonical_value" - ) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_network_policy_async_from_dict(): - await test_get_network_policy_async(request_type=dict) +async def test_create_external_access_rule_async_from_dict(): + await test_create_external_access_rule_async(request_type=dict) -def test_get_network_policy_field_headers(): +def test_create_external_access_rule_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetNetworkPolicyRequest() + request = vmwareengine.CreateExternalAccessRuleRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: - call.return_value = vmwareengine_resources.NetworkPolicy() - client.get_network_policy(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_external_access_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -7495,30 +8149,30 @@ def test_get_network_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_network_policy_field_headers_async(): +async def test_create_external_access_rule_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetNetworkPolicyRequest() + request = vmwareengine.CreateExternalAccessRuleRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.NetworkPolicy() + operations_pb2.Operation(name="operations/op") ) - await client.get_network_policy(request) + await client.create_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7529,37 +8183,47 @@ async def test_get_network_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_network_policy_flattened(): +def test_create_external_access_rule_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine_resources.NetworkPolicy() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_network_policy( - name="name_value", + client.create_external_access_rule( + parent="parent_value", + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + external_access_rule_id="external_access_rule_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].external_access_rule + mock_val = vmwareengine_resources.ExternalAccessRule(name="name_value") + assert arg == mock_val + arg = args[0].external_access_rule_id + mock_val = "external_access_rule_id_value" assert arg == mock_val -def test_get_network_policy_flattened_error(): +def test_create_external_access_rule_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7567,45 +8231,59 @@ def test_get_network_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_network_policy( - vmwareengine.GetNetworkPolicyRequest(), - name="name_value", + client.create_external_access_rule( + vmwareengine.CreateExternalAccessRuleRequest(), + parent="parent_value", + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + external_access_rule_id="external_access_rule_id_value", ) @pytest.mark.asyncio -async def test_get_network_policy_flattened_async(): +async def test_create_external_access_rule_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_network_policy), "__call__" + type(client.transport.create_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.NetworkPolicy() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.NetworkPolicy() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_network_policy( - name="name_value", + response = await client.create_external_access_rule( + parent="parent_value", + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + external_access_rule_id="external_access_rule_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].external_access_rule + mock_val = vmwareengine_resources.ExternalAccessRule(name="name_value") + assert arg == mock_val + arg = args[0].external_access_rule_id + mock_val = "external_access_rule_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_network_policy_flattened_error_async(): +async def test_create_external_access_rule_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7613,20 +8291,24 @@ async def test_get_network_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_network_policy( - vmwareengine.GetNetworkPolicyRequest(), - name="name_value", + await client.create_external_access_rule( + vmwareengine.CreateExternalAccessRuleRequest(), + parent="parent_value", + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + external_access_rule_id="external_access_rule_id_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListNetworkPoliciesRequest, + vmwareengine.UpdateExternalAccessRuleRequest, dict, ], ) -def test_list_network_policies(request_type, transport: str = "grpc"): +def test_update_external_access_rule(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7638,27 +8320,22 @@ def test_list_network_policies(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListNetworkPoliciesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_network_policies(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListNetworkPoliciesRequest() + assert args[0] == vmwareengine.UpdateExternalAccessRuleRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNetworkPoliciesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_network_policies_empty_call(): +def test_update_external_access_rule_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -7668,18 +8345,18 @@ def test_list_network_policies_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: - client.list_network_policies() + client.update_external_access_rule() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListNetworkPoliciesRequest() + assert args[0] == vmwareengine.UpdateExternalAccessRuleRequest() @pytest.mark.asyncio -async def test_list_network_policies_async( +async def test_update_external_access_rule_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.ListNetworkPoliciesRequest, + request_type=vmwareengine.UpdateExternalAccessRuleRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7692,50 +8369,45 @@ async def test_list_network_policies_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListNetworkPoliciesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_network_policies(request) + response = await client.update_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListNetworkPoliciesRequest() + assert args[0] == vmwareengine.UpdateExternalAccessRuleRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNetworkPoliciesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_network_policies_async_from_dict(): - await test_list_network_policies_async(request_type=dict) +async def test_update_external_access_rule_async_from_dict(): + await test_update_external_access_rule_async(request_type=dict) -def test_list_network_policies_field_headers(): +def test_update_external_access_rule_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListNetworkPoliciesRequest() + request = vmwareengine.UpdateExternalAccessRuleRequest() - request.parent = "parent_value" + request.external_access_rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: - call.return_value = vmwareengine.ListNetworkPoliciesResponse() - client.list_network_policies(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_external_access_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -7746,30 +8418,30 @@ def test_list_network_policies_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "external_access_rule.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_network_policies_field_headers_async(): +async def test_update_external_access_rule_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListNetworkPoliciesRequest() + request = vmwareengine.UpdateExternalAccessRuleRequest() - request.parent = "parent_value" + request.external_access_rule.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListNetworkPoliciesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_network_policies(request) + await client.update_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7780,37 +8452,43 @@ async def test_list_network_policies_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "external_access_rule.name=name_value", ) in kw["metadata"] -def test_list_network_policies_flattened(): +def test_update_external_access_rule_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListNetworkPoliciesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_network_policies( - parent="parent_value", + client.update_external_access_rule( + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].external_access_rule + mock_val = vmwareengine_resources.ExternalAccessRule(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_network_policies_flattened_error(): +def test_update_external_access_rule_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7818,45 +8496,54 @@ def test_list_network_policies_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_network_policies( - vmwareengine.ListNetworkPoliciesRequest(), - parent="parent_value", + client.update_external_access_rule( + vmwareengine.UpdateExternalAccessRuleRequest(), + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_network_policies_flattened_async(): +async def test_update_external_access_rule_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.update_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListNetworkPoliciesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListNetworkPoliciesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_network_policies( - parent="parent_value", + response = await client.update_external_access_rule( + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].external_access_rule + mock_val = vmwareengine_resources.ExternalAccessRule(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_network_policies_flattened_error_async(): +async def test_update_external_access_rule_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7864,319 +8551,122 @@ async def test_list_network_policies_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_network_policies( - vmwareengine.ListNetworkPoliciesRequest(), - parent="parent_value", + await client.update_external_access_rule( + vmwareengine.UpdateExternalAccessRuleRequest(), + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_network_policies_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteExternalAccessRuleRequest, + dict, + ], +) +def test_delete_external_access_rule(request_type, transport: str = "grpc"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.delete_external_access_rule), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="abc", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[], - next_page_token="def", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="ghi", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_network_policies(request={}) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_external_access_rule(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteExternalAccessRuleRequest() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.NetworkPolicy) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_network_policies_pages(transport_name: str = "grpc"): +def test_delete_external_access_rule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), "__call__" + type(client.transport.delete_external_access_rule), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="abc", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[], - next_page_token="def", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="ghi", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_network_policies(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.delete_external_access_rule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteExternalAccessRuleRequest() @pytest.mark.asyncio -async def test_list_network_policies_async_pager(): +async def test_delete_external_access_rule_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.DeleteExternalAccessRuleRequest, +): client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.delete_external_access_rule), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="abc", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[], - next_page_token="def", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="ghi", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_network_policies( - request={}, + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) + response = await client.delete_external_access_rule(request) - assert len(responses) == 6 - assert all( - isinstance(i, vmwareengine_resources.NetworkPolicy) for i in responses - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteExternalAccessRuleRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_network_policies_async_pages(): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, +async def test_delete_external_access_rule_async_from_dict(): + await test_delete_external_access_rule_async(request_type=dict) + + +def test_delete_external_access_rule_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteExternalAccessRuleRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_network_policies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="abc", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[], - next_page_token="def", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="ghi", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_network_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert 
page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - vmwareengine.CreateNetworkPolicyRequest, - dict, - ], -) -def test_create_network_policy(request_type, transport: str = "grpc"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_network_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_network_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateNetworkPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_network_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_network_policy), "__call__" - ) as call: - client.create_network_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateNetworkPolicyRequest() - - -@pytest.mark.asyncio -async def test_create_network_policy_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.CreateNetworkPolicyRequest, -): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_network_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_network_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateNetworkPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_network_policy_async_from_dict(): - await test_create_network_policy_async(request_type=dict) - - -def test_create_network_policy_field_headers(): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = vmwareengine.CreateNetworkPolicyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_network_policy), "__call__" + type(client.transport.delete_external_access_rule), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_network_policy(request) + client.delete_external_access_rule(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8187,30 +8677,30 @@ def test_create_network_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_network_policy_field_headers_async(): +async def test_delete_external_access_rule_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreateNetworkPolicyRequest() + request = vmwareengine.DeleteExternalAccessRuleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_network_policy), "__call__" + type(client.transport.delete_external_access_rule), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_network_policy(request) + await client.delete_external_access_rule(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -8221,45 +8711,37 @@ async def test_create_network_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_network_policy_flattened(): +def test_delete_external_access_rule_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_network_policy), "__call__" + type(client.transport.delete_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_network_policy( - parent="parent_value", - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - network_policy_id="network_policy_id_value", + client.delete_external_access_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].network_policy - mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") - assert arg == mock_val - arg = args[0].network_policy_id - mock_val = "network_policy_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_network_policy_flattened_error(): +def test_delete_external_access_rule_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8267,23 +8749,21 @@ def test_create_network_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_network_policy( - vmwareengine.CreateNetworkPolicyRequest(), - parent="parent_value", - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - network_policy_id="network_policy_id_value", + client.delete_external_access_rule( + vmwareengine.DeleteExternalAccessRuleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_network_policy_flattened_async(): +async def test_delete_external_access_rule_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_network_policy), "__call__" + type(client.transport.delete_external_access_rule), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -8293,29 +8773,21 @@ async def test_create_network_policy_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_network_policy( - parent="parent_value", - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - network_policy_id="network_policy_id_value", + response = await client.delete_external_access_rule( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].network_policy - mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") - assert arg == mock_val - arg = args[0].network_policy_id - mock_val = "network_policy_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_network_policy_flattened_error_async(): +async def test_delete_external_access_rule_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8323,22 +8795,20 @@ async def test_create_network_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_network_policy( - vmwareengine.CreateNetworkPolicyRequest(), - parent="parent_value", - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - network_policy_id="network_policy_id_value", + await client.delete_external_access_rule( + vmwareengine.DeleteExternalAccessRuleRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateNetworkPolicyRequest, + vmwareengine.ListLoggingServersRequest, dict, ], ) -def test_update_network_policy(request_type, transport: str = "grpc"): +def test_list_logging_servers(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8350,22 +8820,27 @@ def test_update_network_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_network_policy(request) + call.return_value = vmwareengine.ListLoggingServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_logging_servers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateNetworkPolicyRequest() + assert args[0] == vmwareengine.ListLoggingServersRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListLoggingServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_network_policy_empty_call(): +def test_list_logging_servers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -8375,18 +8850,17 @@ def test_update_network_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: - client.update_network_policy() + client.list_logging_servers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateNetworkPolicyRequest() + assert args[0] == vmwareengine.ListLoggingServersRequest() @pytest.mark.asyncio -async def test_update_network_policy_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.UpdateNetworkPolicyRequest, +async def test_list_logging_servers_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.ListLoggingServersRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8399,45 +8873,50 @@ async def test_update_network_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine.ListLoggingServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.update_network_policy(request) + response = await client.list_logging_servers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateNetworkPolicyRequest() + assert args[0] == vmwareengine.ListLoggingServersRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListLoggingServersAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_network_policy_async_from_dict(): - await test_update_network_policy_async(request_type=dict) +async def test_list_logging_servers_async_from_dict(): + await test_list_logging_servers_async(request_type=dict) -def test_update_network_policy_field_headers(): +def test_list_logging_servers_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdateNetworkPolicyRequest() + request = vmwareengine.ListLoggingServersRequest() - request.network_policy.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_network_policy(request) + call.return_value = vmwareengine.ListLoggingServersResponse() + client.list_logging_servers(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8448,30 +8927,30 @@ def test_update_network_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "network_policy.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_network_policy_field_headers_async(): +async def test_list_logging_servers_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdateNetworkPolicyRequest() + request = vmwareengine.ListLoggingServersRequest() - request.network_policy.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine.ListLoggingServersResponse() ) - await client.update_network_policy(request) + await client.list_logging_servers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8482,41 +8961,37 @@ async def test_update_network_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "network_policy.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_network_policy_flattened(): +def test_list_logging_servers_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine.ListLoggingServersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_network_policy( - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_logging_servers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].network_policy - mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_network_policy_flattened_error(): +def test_list_logging_servers_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8524,50 +8999,45 @@ def test_update_network_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_network_policy( - vmwareengine.UpdateNetworkPolicyRequest(), - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_logging_servers( + vmwareengine.ListLoggingServersRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_network_policy_flattened_async(): +async def test_list_logging_servers_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_network_policy), "__call__" + type(client.transport.list_logging_servers), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine.ListLoggingServersResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine.ListLoggingServersResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_network_policy( - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_logging_servers( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].network_policy - mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_network_policy_flattened_error_async(): +async def test_list_logging_servers_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8575,21 +9045,220 @@ async def test_update_network_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_network_policy( - vmwareengine.UpdateNetworkPolicyRequest(), - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_logging_servers( + vmwareengine.ListLoggingServersRequest(), + parent="parent_value", + ) + + +def test_list_logging_servers_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logging_servers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + next_page_token="abc", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[], + next_page_token="def", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + ], + next_page_token="ghi", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_logging_servers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.LoggingServer) for i in results) + + +def test_list_logging_servers_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logging_servers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + next_page_token="abc", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[], + next_page_token="def", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + ], + next_page_token="ghi", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + ), + RuntimeError, + ) + pages = list(client.list_logging_servers(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_logging_servers_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logging_servers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + next_page_token="abc", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[], + next_page_token="def", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + ], + next_page_token="ghi", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_logging_servers( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.LoggingServer) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_logging_servers_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logging_servers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + next_page_token="abc", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[], + next_page_token="def", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + ], + next_page_token="ghi", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_logging_servers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeleteNetworkPolicyRequest, + vmwareengine.GetLoggingServerRequest, dict, ], ) -def test_delete_network_policy(request_type, transport: str = "grpc"): +def test_get_logging_server(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8601,22 +9270,35 @@ def test_delete_network_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_network_policy(request) + call.return_value = vmwareengine_resources.LoggingServer( + name="name_value", + hostname="hostname_value", + port=453, + protocol=vmwareengine_resources.LoggingServer.Protocol.UDP, + source_type=vmwareengine_resources.LoggingServer.SourceType.ESXI, + uid="uid_value", + ) + response = client.get_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeleteNetworkPolicyRequest() + assert args[0] == vmwareengine.GetLoggingServerRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.LoggingServer) + assert response.name == "name_value" + assert response.hostname == "hostname_value" + assert response.port == 453 + assert response.protocol == vmwareengine_resources.LoggingServer.Protocol.UDP + assert response.source_type == vmwareengine_resources.LoggingServer.SourceType.ESXI + assert response.uid == "uid_value" -def test_delete_network_policy_empty_call(): +def test_get_logging_server_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -8626,18 +9308,17 @@ def test_delete_network_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: - client.delete_network_policy() + client.get_logging_server() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeleteNetworkPolicyRequest() + assert args[0] == vmwareengine.GetLoggingServerRequest() @pytest.mark.asyncio -async def test_delete_network_policy_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.DeleteNetworkPolicyRequest, +async def test_get_logging_server_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetLoggingServerRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8650,45 +9331,58 @@ async def test_delete_network_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.LoggingServer( + name="name_value", + hostname="hostname_value", + port=453, + protocol=vmwareengine_resources.LoggingServer.Protocol.UDP, + source_type=vmwareengine_resources.LoggingServer.SourceType.ESXI, + uid="uid_value", + ) ) - response = await client.delete_network_policy(request) + response = await client.get_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeleteNetworkPolicyRequest() + assert args[0] == vmwareengine.GetLoggingServerRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.LoggingServer) + assert response.name == "name_value" + assert response.hostname == "hostname_value" + assert response.port == 453 + assert response.protocol == vmwareengine_resources.LoggingServer.Protocol.UDP + assert response.source_type == vmwareengine_resources.LoggingServer.SourceType.ESXI + assert response.uid == "uid_value" @pytest.mark.asyncio -async def test_delete_network_policy_async_from_dict(): - await test_delete_network_policy_async(request_type=dict) +async def test_get_logging_server_async_from_dict(): + await test_get_logging_server_async(request_type=dict) -def test_delete_network_policy_field_headers(): +def test_get_logging_server_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.DeleteNetworkPolicyRequest() + request = vmwareengine.GetLoggingServerRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_network_policy(request) + call.return_value = vmwareengine_resources.LoggingServer() + client.get_logging_server(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8704,25 +9398,25 @@ def test_delete_network_policy_field_headers(): @pytest.mark.asyncio -async def test_delete_network_policy_field_headers_async(): +async def test_get_logging_server_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.DeleteNetworkPolicyRequest() + request = vmwareengine.GetLoggingServerRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine_resources.LoggingServer() ) - await client.delete_network_policy(request) + await client.get_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8737,20 +9431,20 @@ async def test_delete_network_policy_field_headers_async(): ) in kw["metadata"] -def test_delete_network_policy_flattened(): +def test_get_logging_server_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.LoggingServer() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_network_policy( + client.get_logging_server( name="name_value", ) @@ -8763,7 +9457,7 @@ def test_delete_network_policy_flattened(): assert arg == mock_val -def test_delete_network_policy_flattened_error(): +def test_get_logging_server_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8771,31 +9465,31 @@ def test_delete_network_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_network_policy( - vmwareengine.DeleteNetworkPolicyRequest(), + client.get_logging_server( + vmwareengine.GetLoggingServerRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_network_policy_flattened_async(): +async def test_get_logging_server_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_network_policy), "__call__" + type(client.transport.get_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.LoggingServer() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.LoggingServer() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_network_policy( + response = await client.get_logging_server( name="name_value", ) @@ -8809,7 +9503,7 @@ async def test_delete_network_policy_flattened_async(): @pytest.mark.asyncio -async def test_delete_network_policy_flattened_error_async(): +async def test_get_logging_server_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8817,8 +9511,8 @@ async def test_delete_network_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_network_policy( - vmwareengine.DeleteNetworkPolicyRequest(), + await client.get_logging_server( + vmwareengine.GetLoggingServerRequest(), name="name_value", ) @@ -8826,11 +9520,11 @@ async def test_delete_network_policy_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreateVmwareEngineNetworkRequest, + vmwareengine.CreateLoggingServerRequest, dict, ], ) -def test_create_vmware_engine_network(request_type, transport: str = "grpc"): +def test_create_logging_server(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8842,22 +9536,22 @@ def test_create_vmware_engine_network(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_vmware_engine_network(request) + response = client.create_logging_server(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.CreateLoggingServerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_vmware_engine_network_empty_call(): +def test_create_logging_server_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -8867,18 +9561,18 @@ def test_create_vmware_engine_network_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: - client.create_vmware_engine_network() + client.create_logging_server() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.CreateLoggingServerRequest() @pytest.mark.asyncio -async def test_create_vmware_engine_network_async( +async def test_create_logging_server_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.CreateVmwareEngineNetworkRequest, + request_type=vmwareengine.CreateLoggingServerRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8891,45 +9585,45 @@ async def test_create_vmware_engine_network_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_vmware_engine_network(request) + response = await client.create_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreateVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.CreateLoggingServerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_create_vmware_engine_network_async_from_dict(): - await test_create_vmware_engine_network_async(request_type=dict) +async def test_create_logging_server_async_from_dict(): + await test_create_logging_server_async(request_type=dict) -def test_create_vmware_engine_network_field_headers(): +def test_create_logging_server_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreateVmwareEngineNetworkRequest() + request = vmwareengine.CreateLoggingServerRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_vmware_engine_network(request) + client.create_logging_server(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8945,25 +9639,25 @@ def test_create_vmware_engine_network_field_headers(): @pytest.mark.asyncio -async def test_create_vmware_engine_network_field_headers_async(): +async def test_create_logging_server_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreateVmwareEngineNetworkRequest() + request = vmwareengine.CreateLoggingServerRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_vmware_engine_network(request) + await client.create_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8978,25 +9672,23 @@ async def test_create_vmware_engine_network_field_headers_async(): ) in kw["metadata"] -def test_create_vmware_engine_network_flattened(): +def test_create_logging_server_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_vmware_engine_network( + client.create_logging_server( parent="parent_value", - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - vmware_engine_network_id="vmware_engine_network_id_value", + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + logging_server_id="logging_server_id_value", ) # Establish that the underlying call was made with the expected @@ -9006,15 +9698,15 @@ def test_create_vmware_engine_network_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].vmware_engine_network - mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + arg = args[0].logging_server + mock_val = vmwareengine_resources.LoggingServer(name="name_value") assert arg == mock_val - arg = args[0].vmware_engine_network_id - mock_val = "vmware_engine_network_id_value" + arg = args[0].logging_server_id + mock_val = "logging_server_id_value" assert arg == mock_val -def test_create_vmware_engine_network_flattened_error(): +def test_create_logging_server_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9022,25 +9714,23 @@ def test_create_vmware_engine_network_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_vmware_engine_network( - vmwareengine.CreateVmwareEngineNetworkRequest(), + client.create_logging_server( + vmwareengine.CreateLoggingServerRequest(), parent="parent_value", - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - vmware_engine_network_id="vmware_engine_network_id_value", + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + logging_server_id="logging_server_id_value", ) @pytest.mark.asyncio -async def test_create_vmware_engine_network_flattened_async(): +async def test_create_logging_server_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_vmware_engine_network), "__call__" + type(client.transport.create_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9050,12 +9740,10 @@ async def test_create_vmware_engine_network_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_vmware_engine_network( + response = await client.create_logging_server( parent="parent_value", - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - vmware_engine_network_id="vmware_engine_network_id_value", + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + logging_server_id="logging_server_id_value", ) # Establish that the underlying call was made with the expected @@ -9065,16 +9753,16 @@ async def test_create_vmware_engine_network_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].vmware_engine_network - mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + arg = args[0].logging_server + mock_val = vmwareengine_resources.LoggingServer(name="name_value") assert arg == mock_val - arg = args[0].vmware_engine_network_id - mock_val = "vmware_engine_network_id_value" + arg = args[0].logging_server_id + mock_val = "logging_server_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_vmware_engine_network_flattened_error_async(): +async def test_create_logging_server_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9082,24 +9770,22 @@ async def test_create_vmware_engine_network_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_vmware_engine_network( - vmwareengine.CreateVmwareEngineNetworkRequest(), + await client.create_logging_server( + vmwareengine.CreateLoggingServerRequest(), parent="parent_value", - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - vmware_engine_network_id="vmware_engine_network_id_value", + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + logging_server_id="logging_server_id_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateVmwareEngineNetworkRequest, + vmwareengine.UpdateLoggingServerRequest, dict, ], ) -def test_update_vmware_engine_network(request_type, transport: str = "grpc"): +def test_update_logging_server(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9111,22 +9797,22 @@ def test_update_vmware_engine_network(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_vmware_engine_network(request) + response = client.update_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.UpdateLoggingServerRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) -def test_update_vmware_engine_network_empty_call(): +def test_update_logging_server_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -9136,18 +9822,18 @@ def test_update_vmware_engine_network_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: - client.update_vmware_engine_network() + client.update_logging_server() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.UpdateLoggingServerRequest() @pytest.mark.asyncio -async def test_update_vmware_engine_network_async( +async def test_update_logging_server_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.UpdateVmwareEngineNetworkRequest, + request_type=vmwareengine.UpdateLoggingServerRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9160,45 +9846,45 @@ async def test_update_vmware_engine_network_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_vmware_engine_network(request) + response = await client.update_logging_server(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdateVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.UpdateLoggingServerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_vmware_engine_network_async_from_dict(): - await test_update_vmware_engine_network_async(request_type=dict) +async def test_update_logging_server_async_from_dict(): + await test_update_logging_server_async(request_type=dict) -def test_update_vmware_engine_network_field_headers(): +def test_update_logging_server_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdateVmwareEngineNetworkRequest() + request = vmwareengine.UpdateLoggingServerRequest() - request.vmware_engine_network.name = "name_value" + request.logging_server.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_vmware_engine_network(request) + client.update_logging_server(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -9209,30 +9895,30 @@ def test_update_vmware_engine_network_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "vmware_engine_network.name=name_value", + "logging_server.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_vmware_engine_network_field_headers_async(): +async def test_update_logging_server_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdateVmwareEngineNetworkRequest() + request = vmwareengine.UpdateLoggingServerRequest() - request.vmware_engine_network.name = "name_value" + request.logging_server.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_vmware_engine_network(request) + await client.update_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9243,27 +9929,25 @@ async def test_update_vmware_engine_network_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "vmware_engine_network.name=name_value", + "logging_server.name=name_value", ) in kw["metadata"] -def test_update_vmware_engine_network_flattened(): +def test_update_logging_server_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_vmware_engine_network( - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), + client.update_logging_server( + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9271,15 +9955,15 @@ def test_update_vmware_engine_network_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].vmware_engine_network - mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + arg = args[0].logging_server + mock_val = vmwareengine_resources.LoggingServer(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_vmware_engine_network_flattened_error(): +def test_update_logging_server_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9287,24 +9971,22 @@ def test_update_vmware_engine_network_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_vmware_engine_network( - vmwareengine.UpdateVmwareEngineNetworkRequest(), - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), + client.update_logging_server( + vmwareengine.UpdateLoggingServerRequest(), + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_vmware_engine_network_flattened_async(): +async def test_update_logging_server_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_vmware_engine_network), "__call__" + type(client.transport.update_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9314,10 +9996,8 @@ async def test_update_vmware_engine_network_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_vmware_engine_network( - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), + response = await client.update_logging_server( + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9325,8 +10005,8 @@ async def test_update_vmware_engine_network_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].vmware_engine_network - mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + arg = args[0].logging_server + mock_val = vmwareengine_resources.LoggingServer(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -9334,7 +10014,7 @@ async def test_update_vmware_engine_network_flattened_async(): @pytest.mark.asyncio -async def test_update_vmware_engine_network_flattened_error_async(): +async def test_update_logging_server_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9342,11 +10022,9 @@ async def test_update_vmware_engine_network_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_vmware_engine_network( - vmwareengine.UpdateVmwareEngineNetworkRequest(), - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), + await client.update_logging_server( + vmwareengine.UpdateLoggingServerRequest(), + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9354,11 +10032,11 @@ async def test_update_vmware_engine_network_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeleteVmwareEngineNetworkRequest, + vmwareengine.DeleteLoggingServerRequest, dict, ], ) -def test_delete_vmware_engine_network(request_type, transport: str = "grpc"): +def test_delete_logging_server(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9370,22 +10048,22 @@ def test_delete_vmware_engine_network(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_vmware_engine_network(request) + response = client.delete_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeleteVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.DeleteLoggingServerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_vmware_engine_network_empty_call(): +def test_delete_logging_server_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -9395,18 +10073,18 @@ def test_delete_vmware_engine_network_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: - client.delete_vmware_engine_network() + client.delete_logging_server() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeleteVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.DeleteLoggingServerRequest() @pytest.mark.asyncio -async def test_delete_vmware_engine_network_async( +async def test_delete_logging_server_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.DeleteVmwareEngineNetworkRequest, + request_type=vmwareengine.DeleteLoggingServerRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9419,45 +10097,45 @@ async def test_delete_vmware_engine_network_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_vmware_engine_network(request) + response = await client.delete_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeleteVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.DeleteLoggingServerRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_delete_vmware_engine_network_async_from_dict(): - await test_delete_vmware_engine_network_async(request_type=dict) +async def test_delete_logging_server_async_from_dict(): + await test_delete_logging_server_async(request_type=dict) -def test_delete_vmware_engine_network_field_headers(): +def test_delete_logging_server_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.DeleteVmwareEngineNetworkRequest() + request = vmwareengine.DeleteLoggingServerRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_vmware_engine_network(request) + client.delete_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9473,25 +10151,25 @@ def test_delete_vmware_engine_network_field_headers(): @pytest.mark.asyncio -async def test_delete_vmware_engine_network_field_headers_async(): +async def test_delete_logging_server_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.DeleteVmwareEngineNetworkRequest() + request = vmwareengine.DeleteLoggingServerRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_vmware_engine_network(request) + await client.delete_logging_server(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9506,20 +10184,20 @@ async def test_delete_vmware_engine_network_field_headers_async(): ) in kw["metadata"] -def test_delete_vmware_engine_network_flattened(): +def test_delete_logging_server_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_vmware_engine_network( + client.delete_logging_server( name="name_value", ) @@ -9532,7 +10210,7 @@ def test_delete_vmware_engine_network_flattened(): assert arg == mock_val -def test_delete_vmware_engine_network_flattened_error(): +def test_delete_logging_server_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9540,21 +10218,21 @@ def test_delete_vmware_engine_network_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_vmware_engine_network( - vmwareengine.DeleteVmwareEngineNetworkRequest(), + client.delete_logging_server( + vmwareengine.DeleteLoggingServerRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_vmware_engine_network_flattened_async(): +async def test_delete_logging_server_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_vmware_engine_network), "__call__" + type(client.transport.delete_logging_server), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -9564,7 +10242,7 @@ async def test_delete_vmware_engine_network_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_vmware_engine_network( + response = await client.delete_logging_server( name="name_value", ) @@ -9578,7 +10256,7 @@ async def test_delete_vmware_engine_network_flattened_async(): @pytest.mark.asyncio -async def test_delete_vmware_engine_network_flattened_error_async(): +async def test_delete_logging_server_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9586,8 +10264,8 @@ async def test_delete_vmware_engine_network_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_vmware_engine_network( - vmwareengine.DeleteVmwareEngineNetworkRequest(), + await client.delete_logging_server( + vmwareengine.DeleteLoggingServerRequest(), name="name_value", ) @@ -9595,11 +10273,11 @@ async def test_delete_vmware_engine_network_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetVmwareEngineNetworkRequest, + vmwareengine.ListNodeTypesRequest, dict, ], ) -def test_get_vmware_engine_network(request_type, transport: str = "grpc"): +def test_list_node_types(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9610,36 +10288,26 @@ def test_get_vmware_engine_network(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.VmwareEngineNetwork( - name="name_value", - description="description_value", - state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, - type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, - uid="uid_value", - etag="etag_value", + call.return_value = vmwareengine.ListNodeTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.get_vmware_engine_network(request) + response = client.list_node_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.ListNodeTypesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING - assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY - assert response.uid == "uid_value" - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListNodeTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_vmware_engine_network_empty_call(): +def test_list_node_types_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -9648,19 +10316,16 @@ def test_get_vmware_engine_network_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: - client.get_vmware_engine_network() + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + client.list_node_types() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.ListNodeTypesRequest() @pytest.mark.asyncio -async def test_get_vmware_engine_network_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.GetVmwareEngineNetworkRequest, +async def test_list_node_types_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.ListNodeTypesRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9672,59 +10337,47 @@ async def test_get_vmware_engine_network_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.VmwareEngineNetwork( - name="name_value", - description="description_value", - state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, - type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, - uid="uid_value", - etag="etag_value", + vmwareengine.ListNodeTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_vmware_engine_network(request) + response = await client.list_node_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() + assert args[0] == vmwareengine.ListNodeTypesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING - assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY - assert response.uid == "uid_value" - assert response.etag == "etag_value" + assert isinstance(response, pagers.ListNodeTypesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_vmware_engine_network_async_from_dict(): - await test_get_vmware_engine_network_async(request_type=dict) +async def test_list_node_types_async_from_dict(): + await test_list_node_types_async(request_type=dict) -def test_get_vmware_engine_network_field_headers(): +def test_list_node_types_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetVmwareEngineNetworkRequest() + request = vmwareengine.ListNodeTypesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: - call.return_value = vmwareengine_resources.VmwareEngineNetwork() - client.get_vmware_engine_network(request) + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + call.return_value = vmwareengine.ListNodeTypesResponse() + client.list_node_types(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -9735,30 +10388,28 @@ def test_get_vmware_engine_network_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_vmware_engine_network_field_headers_async(): +async def test_list_node_types_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetVmwareEngineNetworkRequest() + request = vmwareengine.ListNodeTypesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.VmwareEngineNetwork() + vmwareengine.ListNodeTypesResponse() ) - await client.get_vmware_engine_network(request) + await client.list_node_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9769,37 +10420,35 @@ async def test_get_vmware_engine_network_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_vmware_engine_network_flattened(): +def test_list_node_types_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.VmwareEngineNetwork() + call.return_value = vmwareengine.ListNodeTypesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_vmware_engine_network( - name="name_value", + client.list_node_types( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_vmware_engine_network_flattened_error(): +def test_list_node_types_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9807,45 +10456,43 @@ def test_get_vmware_engine_network_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_vmware_engine_network( - vmwareengine.GetVmwareEngineNetworkRequest(), - name="name_value", + client.list_node_types( + vmwareengine.ListNodeTypesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_vmware_engine_network_flattened_async(): +async def test_list_node_types_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_vmware_engine_network), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine_resources.VmwareEngineNetwork() + call.return_value = vmwareengine.ListNodeTypesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.VmwareEngineNetwork() + vmwareengine.ListNodeTypesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_vmware_engine_network( - name="name_value", + response = await client.list_node_types( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_vmware_engine_network_flattened_error_async(): +async def test_list_node_types_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9853,20 +10500,210 @@ async def test_get_vmware_engine_network_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_vmware_engine_network( - vmwareengine.GetVmwareEngineNetworkRequest(), - name="name_value", + await client.list_node_types( + vmwareengine.ListNodeTypesRequest(), + parent="parent_value", + ) + + +def test_list_node_types_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + next_page_token="abc", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[], + next_page_token="def", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_node_types(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.NodeType) for i in results) + + +def test_list_node_types_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_types), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + next_page_token="abc", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[], + next_page_token="def", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + ), + RuntimeError, + ) + pages = list(client.list_node_types(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_node_types_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_node_types), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + next_page_token="abc", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[], + next_page_token="def", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_node_types( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, vmwareengine_resources.NodeType) for i in responses) + + +@pytest.mark.asyncio +async def test_list_node_types_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_node_types), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + next_page_token="abc", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[], + next_page_token="def", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_node_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListVmwareEngineNetworksRequest, + vmwareengine.GetNodeTypeRequest, dict, ], ) -def test_list_vmware_engine_networks(request_type, transport: str = "grpc"): +def test_get_node_type(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9877,28 +10714,48 @@ def test_list_vmware_engine_networks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine.ListVmwareEngineNetworksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = vmwareengine_resources.NodeType( + name="name_value", + node_type_id="node_type_id_value", + display_name="display_name_value", + virtual_cpu_count=1846, + total_core_count=1716, + memory_gb=961, + disk_size_gb=1261, + available_custom_core_counts=[2974], + kind=vmwareengine_resources.NodeType.Kind.STANDARD, + families=["families_value"], + capabilities=[ + vmwareengine_resources.NodeType.Capability.STRETCHED_CLUSTERS + ], ) - response = client.list_vmware_engine_networks(request) + response = client.get_node_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListVmwareEngineNetworksRequest() + assert args[0] == vmwareengine.GetNodeTypeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVmwareEngineNetworksPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, vmwareengine_resources.NodeType) + assert response.name == "name_value" + assert response.node_type_id == "node_type_id_value" + assert response.display_name == "display_name_value" + assert response.virtual_cpu_count == 1846 + assert response.total_core_count == 1716 + assert response.memory_gb == 961 + assert response.disk_size_gb == 1261 + assert response.available_custom_core_counts == [2974] + assert response.kind == vmwareengine_resources.NodeType.Kind.STANDARD + assert response.families == ["families_value"] + assert response.capabilities == [ + vmwareengine_resources.NodeType.Capability.STRETCHED_CLUSTERS + ] -def test_list_vmware_engine_networks_empty_call(): +def test_get_node_type_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -9907,19 +10764,16 @@ def test_list_vmware_engine_networks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: - client.list_vmware_engine_networks() + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + client.get_node_type() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListVmwareEngineNetworksRequest() + assert args[0] == vmwareengine.GetNodeTypeRequest() @pytest.mark.asyncio -async def test_list_vmware_engine_networks_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.ListVmwareEngineNetworksRequest, +async def test_get_node_type_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetNodeTypeRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9931,51 +10785,69 @@ async def test_list_vmware_engine_networks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListVmwareEngineNetworksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + vmwareengine_resources.NodeType( + name="name_value", + node_type_id="node_type_id_value", + display_name="display_name_value", + virtual_cpu_count=1846, + total_core_count=1716, + memory_gb=961, + disk_size_gb=1261, + available_custom_core_counts=[2974], + kind=vmwareengine_resources.NodeType.Kind.STANDARD, + families=["families_value"], + capabilities=[ + vmwareengine_resources.NodeType.Capability.STRETCHED_CLUSTERS + ], + ) ) - response = await client.list_vmware_engine_networks(request) + response = await client.get_node_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListVmwareEngineNetworksRequest() + assert args[0] == vmwareengine.GetNodeTypeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVmwareEngineNetworksAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, vmwareengine_resources.NodeType) + assert response.name == "name_value" + assert response.node_type_id == "node_type_id_value" + assert response.display_name == "display_name_value" + assert response.virtual_cpu_count == 1846 + assert response.total_core_count == 1716 + assert response.memory_gb == 961 + assert response.disk_size_gb == 1261 + assert response.available_custom_core_counts == [2974] + assert response.kind == vmwareengine_resources.NodeType.Kind.STANDARD + assert response.families == ["families_value"] + assert response.capabilities == [ + vmwareengine_resources.NodeType.Capability.STRETCHED_CLUSTERS + ] @pytest.mark.asyncio -async def test_list_vmware_engine_networks_async_from_dict(): - await test_list_vmware_engine_networks_async(request_type=dict) +async def test_get_node_type_async_from_dict(): + await test_get_node_type_async(request_type=dict) -def test_list_vmware_engine_networks_field_headers(): +def test_get_node_type_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListVmwareEngineNetworksRequest() + request = vmwareengine.GetNodeTypeRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: - call.return_value = vmwareengine.ListVmwareEngineNetworksResponse() - client.list_vmware_engine_networks(request) + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: + call.return_value = vmwareengine_resources.NodeType() + client.get_node_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9986,30 +10858,28 @@ def test_list_vmware_engine_networks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_vmware_engine_networks_field_headers_async(): +async def test_get_node_type_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListVmwareEngineNetworksRequest() + request = vmwareengine.GetNodeTypeRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListVmwareEngineNetworksResponse() + vmwareengine_resources.NodeType() ) - await client.list_vmware_engine_networks(request) + await client.get_node_type(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -10020,37 +10890,35 @@ async def test_list_vmware_engine_networks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_vmware_engine_networks_flattened(): +def test_get_node_type_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListVmwareEngineNetworksResponse() + call.return_value = vmwareengine_resources.NodeType() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_vmware_engine_networks( - parent="parent_value", + client.get_node_type( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_vmware_engine_networks_flattened_error(): +def test_get_node_type_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10058,45 +10926,43 @@ def test_list_vmware_engine_networks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_vmware_engine_networks( - vmwareengine.ListVmwareEngineNetworksRequest(), - parent="parent_value", + client.get_node_type( + vmwareengine.GetNodeTypeRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_vmware_engine_networks_flattened_async(): +async def test_get_node_type_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_node_type), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListVmwareEngineNetworksResponse() + call.return_value = vmwareengine_resources.NodeType() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListVmwareEngineNetworksResponse() + vmwareengine_resources.NodeType() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_vmware_engine_networks( - parent="parent_value", + response = await client.get_node_type( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_vmware_engine_networks_flattened_error_async(): +async def test_get_node_type_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10104,321 +10970,128 @@ async def test_list_vmware_engine_networks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_vmware_engine_networks( - vmwareengine.ListVmwareEngineNetworksRequest(), - parent="parent_value", + await client.get_node_type( + vmwareengine.GetNodeTypeRequest(), + name="name_value", ) -def test_list_vmware_engine_networks_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ShowNsxCredentialsRequest, + dict, + ], +) +def test_show_nsx_credentials(request_type, transport: str = "grpc"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="abc", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[], - next_page_token="def", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="ghi", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.Credentials( + username="username_value", + password="password_value", ) - pager = client.list_vmware_engine_networks(request={}) + response = client.show_nsx_credentials(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ShowNsxCredentialsRequest() - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in results - ) + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.Credentials) + assert response.username == "username_value" + assert response.password == "password_value" -def test_list_vmware_engine_networks_pages(transport_name: str = "grpc"): +def test_show_nsx_credentials_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_vmware_engine_networks), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="abc", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[], - next_page_token="def", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="ghi", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - ), - RuntimeError, - ) - pages = list(client.list_vmware_engine_networks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.show_nsx_credentials() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ShowNsxCredentialsRequest() @pytest.mark.asyncio -async def test_list_vmware_engine_networks_async_pager(): +async def test_show_nsx_credentials_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.ShowNsxCredentialsRequest +): client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_vmware_engine_networks), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="abc", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[], - next_page_token="def", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="ghi", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_vmware_engine_networks( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_vmware_engine_networks_async_pages(): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_vmware_engine_networks), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="abc", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[], - next_page_token="def", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - ], - next_page_token="ghi", - ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_vmware_engine_networks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - vmwareengine.CreatePrivateConnectionRequest, - dict, - ], -) -def test_create_private_connection(request_type, transport: str = "grpc"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreatePrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_private_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), "__call__" - ) as call: - client.create_private_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreatePrivateConnectionRequest() - - -@pytest.mark.asyncio -async def test_create_private_connection_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.CreatePrivateConnectionRequest, -): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_private_connection), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.Credentials( + username="username_value", + password="password_value", + ) ) - response = await client.create_private_connection(request) + response = await client.show_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.CreatePrivateConnectionRequest() + assert args[0] == vmwareengine.ShowNsxCredentialsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.Credentials) + assert response.username == "username_value" + assert response.password == "password_value" @pytest.mark.asyncio -async def test_create_private_connection_async_from_dict(): - await test_create_private_connection_async(request_type=dict) +async def test_show_nsx_credentials_async_from_dict(): + await test_show_nsx_credentials_async(request_type=dict) -def test_create_private_connection_field_headers(): +def test_show_nsx_credentials_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreatePrivateConnectionRequest() + request = vmwareengine.ShowNsxCredentialsRequest() - request.parent = "parent_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_private_connection), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_private_connection(request) + call.return_value = vmwareengine_resources.Credentials() + client.show_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10429,30 +11102,30 @@ def test_create_private_connection_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_private_connection_field_headers_async(): +async def test_show_nsx_credentials_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.CreatePrivateConnectionRequest() + request = vmwareengine.ShowNsxCredentialsRequest() - request.parent = "parent_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_private_connection), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine_resources.Credentials() ) - await client.create_private_connection(request) + await client.show_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -10463,47 +11136,37 @@ async def test_create_private_connection_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] -def test_create_private_connection_flattened(): +def test_show_nsx_credentials_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_private_connection), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.Credentials() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_private_connection( - parent="parent_value", - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - private_connection_id="private_connection_id_value", + client.show_nsx_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].private_connection - mock_val = vmwareengine_resources.PrivateConnection(name="name_value") - assert arg == mock_val - arg = args[0].private_connection_id - mock_val = "private_connection_id_value" + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val -def test_create_private_connection_flattened_error(): +def test_show_nsx_credentials_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10511,59 +11174,45 @@ def test_create_private_connection_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_private_connection( - vmwareengine.CreatePrivateConnectionRequest(), - parent="parent_value", - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - private_connection_id="private_connection_id_value", + client.show_nsx_credentials( + vmwareengine.ShowNsxCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.asyncio -async def test_create_private_connection_flattened_async(): +async def test_show_nsx_credentials_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_private_connection), "__call__" + type(client.transport.show_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.Credentials() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.Credentials() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_private_connection( - parent="parent_value", - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - private_connection_id="private_connection_id_value", + response = await client.show_nsx_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].private_connection - mock_val = vmwareengine_resources.PrivateConnection(name="name_value") - assert arg == mock_val - arg = args[0].private_connection_id - mock_val = "private_connection_id_value" + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_private_connection_flattened_error_async(): +async def test_show_nsx_credentials_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10571,24 +11220,20 @@ async def test_create_private_connection_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_private_connection( - vmwareengine.CreatePrivateConnectionRequest(), - parent="parent_value", - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - private_connection_id="private_connection_id_value", + await client.show_nsx_credentials( + vmwareengine.ShowNsxCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetPrivateConnectionRequest, + vmwareengine.ShowVcenterCredentialsRequest, dict, ], ) -def test_get_private_connection(request_type, transport: str = "grpc"): +def test_show_vcenter_credentials(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10600,57 +11245,27 @@ def test_get_private_connection(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine_resources.PrivateConnection( - name="name_value", - description="description_value", - state=vmwareengine_resources.PrivateConnection.State.CREATING, - vmware_engine_network="vmware_engine_network_value", - vmware_engine_network_canonical="vmware_engine_network_canonical_value", - type_=vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS, - peering_id="peering_id_value", - routing_mode=vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL, - uid="uid_value", - service_network="service_network_value", - peering_state=vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE, + call.return_value = vmwareengine_resources.Credentials( + username="username_value", + password="password_value", ) - response = client.get_private_connection(request) + response = client.show_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetPrivateConnectionRequest() + assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.PrivateConnection) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == vmwareengine_resources.PrivateConnection.State.CREATING - assert response.vmware_engine_network == "vmware_engine_network_value" - assert ( - response.vmware_engine_network_canonical - == "vmware_engine_network_canonical_value" - ) - assert ( - response.type_ - == vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS - ) - assert response.peering_id == "peering_id_value" - assert ( - response.routing_mode - == vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL - ) - assert response.uid == "uid_value" - assert response.service_network == "service_network_value" - assert ( - response.peering_state - == vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE - ) + assert isinstance(response, vmwareengine_resources.Credentials) + assert response.username == "username_value" + assert response.password == "password_value" -def test_get_private_connection_empty_call(): +def test_show_vcenter_credentials_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -10660,18 +11275,18 @@ def test_get_private_connection_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: - client.get_private_connection() + client.show_vcenter_credentials() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetPrivateConnectionRequest() + assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() @pytest.mark.asyncio -async def test_get_private_connection_async( +async def test_show_vcenter_credentials_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.GetPrivateConnectionRequest, + request_type=vmwareengine.ShowVcenterCredentialsRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10684,80 +11299,50 @@ async def test_get_private_connection_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.PrivateConnection( - name="name_value", - description="description_value", - state=vmwareengine_resources.PrivateConnection.State.CREATING, - vmware_engine_network="vmware_engine_network_value", - vmware_engine_network_canonical="vmware_engine_network_canonical_value", - type_=vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS, - peering_id="peering_id_value", - routing_mode=vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL, - uid="uid_value", - service_network="service_network_value", - peering_state=vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE, + vmwareengine_resources.Credentials( + username="username_value", + password="password_value", ) ) - response = await client.get_private_connection(request) + response = await client.show_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.GetPrivateConnectionRequest() + assert args[0] == vmwareengine.ShowVcenterCredentialsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.PrivateConnection) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == vmwareengine_resources.PrivateConnection.State.CREATING - assert response.vmware_engine_network == "vmware_engine_network_value" - assert ( - response.vmware_engine_network_canonical - == "vmware_engine_network_canonical_value" - ) - assert ( - response.type_ - == vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS - ) - assert response.peering_id == "peering_id_value" - assert ( - response.routing_mode - == vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL - ) - assert response.uid == "uid_value" - assert response.service_network == "service_network_value" - assert ( - response.peering_state - == vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE - ) + assert isinstance(response, vmwareengine_resources.Credentials) + assert response.username == "username_value" + assert response.password == "password_value" @pytest.mark.asyncio -async def test_get_private_connection_async_from_dict(): - await test_get_private_connection_async(request_type=dict) +async def test_show_vcenter_credentials_async_from_dict(): + await test_show_vcenter_credentials_async(request_type=dict) -def test_get_private_connection_field_headers(): +def test_show_vcenter_credentials_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetPrivateConnectionRequest() + request = vmwareengine.ShowVcenterCredentialsRequest() - request.name = "name_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: - call.return_value = vmwareengine_resources.PrivateConnection() - client.get_private_connection(request) + call.return_value = vmwareengine_resources.Credentials() + client.show_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10768,30 +11353,30 @@ def test_get_private_connection_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_private_connection_field_headers_async(): +async def test_show_vcenter_credentials_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.GetPrivateConnectionRequest() + request = vmwareengine.ShowVcenterCredentialsRequest() - request.name = "name_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.PrivateConnection() + vmwareengine_resources.Credentials() ) - await client.get_private_connection(request) + await client.show_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -10802,37 +11387,37 @@ async def test_get_private_connection_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] -def test_get_private_connection_flattened(): +def test_show_vcenter_credentials_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.PrivateConnection() + call.return_value = vmwareengine_resources.Credentials() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_private_connection( - name="name_value", + client.show_vcenter_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val -def test_get_private_connection_flattened_error(): +def test_show_vcenter_credentials_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10840,45 +11425,45 @@ def test_get_private_connection_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_private_connection( - vmwareengine.GetPrivateConnectionRequest(), - name="name_value", + client.show_vcenter_credentials( + vmwareengine.ShowVcenterCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.asyncio -async def test_get_private_connection_flattened_async(): +async def test_show_vcenter_credentials_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_private_connection), "__call__" + type(client.transport.show_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine_resources.PrivateConnection() + call.return_value = vmwareengine_resources.Credentials() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine_resources.PrivateConnection() + vmwareengine_resources.Credentials() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_private_connection( - name="name_value", + response = await client.show_vcenter_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_private_connection_flattened_error_async(): +async def test_show_vcenter_credentials_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10886,20 +11471,20 @@ async def test_get_private_connection_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_private_connection( - vmwareengine.GetPrivateConnectionRequest(), - name="name_value", + await client.show_vcenter_credentials( + vmwareengine.ShowVcenterCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListPrivateConnectionsRequest, + vmwareengine.ResetNsxCredentialsRequest, dict, ], ) -def test_list_private_connections(request_type, transport: str = "grpc"): +def test_reset_nsx_credentials(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10911,27 +11496,22 @@ def test_list_private_connections(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine.ListPrivateConnectionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_private_connections(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reset_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListPrivateConnectionsRequest() + assert args[0] == vmwareengine.ResetNsxCredentialsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPrivateConnectionsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_private_connections_empty_call(): +def test_reset_nsx_credentials_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -10941,18 +11521,18 @@ def test_list_private_connections_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: - client.list_private_connections() + client.reset_nsx_credentials() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListPrivateConnectionsRequest() + assert args[0] == vmwareengine.ResetNsxCredentialsRequest() @pytest.mark.asyncio -async def test_list_private_connections_async( +async def test_reset_nsx_credentials_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.ListPrivateConnectionsRequest, + request_type=vmwareengine.ResetNsxCredentialsRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10965,50 +11545,45 @@ async def test_list_private_connections_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListPrivateConnectionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_private_connections(request) + response = await client.reset_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListPrivateConnectionsRequest() + assert args[0] == vmwareengine.ResetNsxCredentialsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_private_connections_async_from_dict(): - await test_list_private_connections_async(request_type=dict) +async def test_reset_nsx_credentials_async_from_dict(): + await test_reset_nsx_credentials_async(request_type=dict) -def test_list_private_connections_field_headers(): +def test_reset_nsx_credentials_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListPrivateConnectionsRequest() + request = vmwareengine.ResetNsxCredentialsRequest() - request.parent = "parent_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: - call.return_value = vmwareengine.ListPrivateConnectionsResponse() - client.list_private_connections(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.reset_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11019,30 +11594,30 @@ def test_list_private_connections_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_private_connections_field_headers_async(): +async def test_reset_nsx_credentials_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListPrivateConnectionsRequest() + request = vmwareengine.ResetNsxCredentialsRequest() - request.parent = "parent_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListPrivateConnectionsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_private_connections(request) + await client.reset_nsx_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11053,37 +11628,37 @@ async def test_list_private_connections_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] -def test_list_private_connections_flattened(): +def test_reset_nsx_credentials_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListPrivateConnectionsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_private_connections( - parent="parent_value", + client.reset_nsx_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val -def test_list_private_connections_flattened_error(): +def test_reset_nsx_credentials_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11091,45 +11666,45 @@ def test_list_private_connections_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_private_connections( - vmwareengine.ListPrivateConnectionsRequest(), - parent="parent_value", + client.reset_nsx_credentials( + vmwareengine.ResetNsxCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.asyncio -async def test_list_private_connections_flattened_async(): +async def test_reset_nsx_credentials_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_nsx_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListPrivateConnectionsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListPrivateConnectionsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_private_connections( - parent="parent_value", + response = await client.reset_nsx_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_private_connections_flattened_error_async(): +async def test_reset_nsx_credentials_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11137,321 +11712,119 @@ async def test_list_private_connections_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_private_connections( - vmwareengine.ListPrivateConnectionsRequest(), - parent="parent_value", + await client.reset_nsx_credentials( + vmwareengine.ResetNsxCredentialsRequest(), + private_cloud="private_cloud_value", ) -def test_list_private_connections_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ResetVcenterCredentialsRequest, + dict, + ], +) +def test_reset_vcenter_credentials(request_type, transport: str = "grpc"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_private_connections(request={}) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reset_vcenter_credentials(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ResetVcenterCredentialsRequest() - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, vmwareengine_resources.PrivateConnection) for i in results - ) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_private_connections_pages(transport_name: str = "grpc"): +def test_reset_vcenter_credentials_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), "__call__" + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - pages = list(client.list_private_connections(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.reset_vcenter_credentials() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ResetVcenterCredentialsRequest() @pytest.mark.asyncio -async def test_list_private_connections_async_pager(): +async def test_reset_vcenter_credentials_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ResetVcenterCredentialsRequest, +): client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the 
actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connections), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_private_connections( - request={}, + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) + response = await client.reset_vcenter_credentials(request) - assert len(responses) == 6 - assert all( - isinstance(i, vmwareengine_resources.PrivateConnection) for i in responses - ) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ResetVcenterCredentialsRequest() - -@pytest.mark.asyncio -async def test_list_private_connections_async_pages(): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connections), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_private_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - vmwareengine.UpdatePrivateConnectionRequest, - dict, - ], -) -def test_update_private_connection(request_type, transport: str = "grpc"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_private_connection), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdatePrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_private_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_private_connection), "__call__" - ) as call: - client.update_private_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdatePrivateConnectionRequest() - - -@pytest.mark.asyncio -async def test_update_private_connection_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.UpdatePrivateConnectionRequest, -): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_private_connection), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_private_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.UpdatePrivateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_private_connection_async_from_dict(): - await test_update_private_connection_async(request_type=dict) +async def test_reset_vcenter_credentials_async_from_dict(): + await test_reset_vcenter_credentials_async(request_type=dict) -def test_update_private_connection_field_headers(): +def test_reset_vcenter_credentials_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdatePrivateConnectionRequest() + request = vmwareengine.ResetVcenterCredentialsRequest() - request.private_connection.name = "name_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_private_connection), "__call__" + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_private_connection(request) + client.reset_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11462,30 +11835,30 @@ def test_update_private_connection_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_connection.name=name_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_private_connection_field_headers_async(): +async def test_reset_vcenter_credentials_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.UpdatePrivateConnectionRequest() + request = vmwareengine.ResetVcenterCredentialsRequest() - request.private_connection.name = "name_value" + request.private_cloud = "private_cloud_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_private_connection), "__call__" + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_private_connection(request) + await client.reset_vcenter_credentials(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -11496,43 +11869,37 @@ async def test_update_private_connection_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "private_connection.name=name_value", + "private_cloud=private_cloud_value", ) in kw["metadata"] -def test_update_private_connection_flattened(): +def test_reset_vcenter_credentials_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_private_connection), "__call__" + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_private_connection( - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.reset_vcenter_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].private_connection - mock_val = vmwareengine_resources.PrivateConnection(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val -def test_update_private_connection_flattened_error(): +def test_reset_vcenter_credentials_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11540,24 +11907,21 @@ def test_update_private_connection_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_private_connection( - vmwareengine.UpdatePrivateConnectionRequest(), - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.reset_vcenter_credentials( + vmwareengine.ResetVcenterCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.asyncio -async def test_update_private_connection_flattened_async(): +async def test_reset_vcenter_credentials_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_private_connection), "__call__" + type(client.transport.reset_vcenter_credentials), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -11567,27 +11931,21 @@ async def test_update_private_connection_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_private_connection( - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.reset_vcenter_credentials( + private_cloud="private_cloud_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].private_connection - mock_val = vmwareengine_resources.PrivateConnection(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].private_cloud + mock_val = "private_cloud_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_private_connection_flattened_error_async(): +async def test_reset_vcenter_credentials_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11595,23 +11953,20 @@ async def test_update_private_connection_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_private_connection( - vmwareengine.UpdatePrivateConnectionRequest(), - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.reset_vcenter_credentials( + vmwareengine.ResetVcenterCredentialsRequest(), + private_cloud="private_cloud_value", ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeletePrivateConnectionRequest, + vmwareengine.GetDnsForwardingRequest, dict, ], ) -def test_delete_private_connection(request_type, transport: str = "grpc"): +def test_get_dns_forwarding(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11623,22 +11978,25 @@ def test_delete_private_connection(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_private_connection(request) + call.return_value = vmwareengine_resources.DnsForwarding( + name="name_value", + ) + response = client.get_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeletePrivateConnectionRequest() + assert args[0] == vmwareengine.GetDnsForwardingRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.DnsForwarding) + assert response.name == "name_value" -def test_delete_private_connection_empty_call(): +def test_get_dns_forwarding_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -11648,18 +12006,17 @@ def test_delete_private_connection_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: - client.delete_private_connection() + client.get_dns_forwarding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeletePrivateConnectionRequest() + assert args[0] == vmwareengine.GetDnsForwardingRequest() @pytest.mark.asyncio -async def test_delete_private_connection_async( - transport: str = "grpc_asyncio", - request_type=vmwareengine.DeletePrivateConnectionRequest, +async def test_get_dns_forwarding_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetDnsForwardingRequest ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11672,45 +12029,48 @@ async def test_delete_private_connection_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.DnsForwarding( + name="name_value", + ) ) - response = await client.delete_private_connection(request) + response = await client.get_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.DeletePrivateConnectionRequest() + assert args[0] == vmwareengine.GetDnsForwardingRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, vmwareengine_resources.DnsForwarding) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_delete_private_connection_async_from_dict(): - await test_delete_private_connection_async(request_type=dict) +async def test_get_dns_forwarding_async_from_dict(): + await test_get_dns_forwarding_async(request_type=dict) -def test_delete_private_connection_field_headers(): +def test_get_dns_forwarding_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.DeletePrivateConnectionRequest() + request = vmwareengine.GetDnsForwardingRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_private_connection(request) + call.return_value = vmwareengine_resources.DnsForwarding() + client.get_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11726,25 +12086,25 @@ def test_delete_private_connection_field_headers(): @pytest.mark.asyncio -async def test_delete_private_connection_field_headers_async(): +async def test_get_dns_forwarding_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.DeletePrivateConnectionRequest() + request = vmwareengine.GetDnsForwardingRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + vmwareengine_resources.DnsForwarding() ) - await client.delete_private_connection(request) + await client.get_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11759,20 +12119,20 @@ async def test_delete_private_connection_field_headers_async(): ) in kw["metadata"] -def test_delete_private_connection_flattened(): +def test_get_dns_forwarding_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.DnsForwarding() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_private_connection( + client.get_dns_forwarding( name="name_value", ) @@ -11785,7 +12145,7 @@ def test_delete_private_connection_flattened(): assert arg == mock_val -def test_delete_private_connection_flattened_error(): +def test_get_dns_forwarding_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11793,31 +12153,31 @@ def test_delete_private_connection_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_private_connection( - vmwareengine.DeletePrivateConnectionRequest(), + client.get_dns_forwarding( + vmwareengine.GetDnsForwardingRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_private_connection_flattened_async(): +async def test_get_dns_forwarding_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_private_connection), "__call__" + type(client.transport.get_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = vmwareengine_resources.DnsForwarding() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vmwareengine_resources.DnsForwarding() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_private_connection( + response = await client.get_dns_forwarding( name="name_value", ) @@ -11831,7 +12191,7 @@ async def test_delete_private_connection_flattened_async(): @pytest.mark.asyncio -async def test_delete_private_connection_flattened_error_async(): +async def test_get_dns_forwarding_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11839,8 +12199,8 @@ async def test_delete_private_connection_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_private_connection( - vmwareengine.DeletePrivateConnectionRequest(), + await client.get_dns_forwarding( + vmwareengine.GetDnsForwardingRequest(), name="name_value", ) @@ -11848,11 +12208,11 @@ async def test_delete_private_connection_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + vmwareengine.UpdateDnsForwardingRequest, dict, ], ) -def test_list_private_connection_peering_routes(request_type, transport: str = "grpc"): +def test_update_dns_forwarding(request_type, transport: str = "grpc"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11864,25 +12224,22 @@ def test_list_private_connection_peering_routes(request_type, transport: str = " # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_private_connection_peering_routes(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + assert args[0] == vmwareengine.UpdateDnsForwardingRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPrivateConnectionPeeringRoutesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_list_private_connection_peering_routes_empty_call(): +def test_update_dns_forwarding_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( @@ -11892,18 +12249,18 @@ def test_list_private_connection_peering_routes_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: - client.list_private_connection_peering_routes() + client.update_dns_forwarding() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + assert args[0] == vmwareengine.UpdateDnsForwardingRequest() @pytest.mark.asyncio -async def test_list_private_connection_peering_routes_async( +async def test_update_dns_forwarding_async( transport: str = "grpc_asyncio", - request_type=vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + request_type=vmwareengine.UpdateDnsForwardingRequest, ): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11916,48 +12273,45 @@ async def test_list_private_connection_peering_routes_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_private_connection_peering_routes(request) + response = await client.update_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + assert args[0] == vmwareengine.UpdateDnsForwardingRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPrivateConnectionPeeringRoutesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_private_connection_peering_routes_async_from_dict(): - await test_list_private_connection_peering_routes_async(request_type=dict) +async def test_update_dns_forwarding_async_from_dict(): + await test_update_dns_forwarding_async(request_type=dict) -def test_list_private_connection_peering_routes_field_headers(): +def test_update_dns_forwarding_field_headers(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + request = vmwareengine.UpdateDnsForwardingRequest() - request.parent = "parent_value" + request.dns_forwarding.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: - call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() - client.list_private_connection_peering_routes(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11968,30 +12322,30 @@ def test_list_private_connection_peering_routes_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "dns_forwarding.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_private_connection_peering_routes_field_headers_async(): +async def test_update_dns_forwarding_field_headers_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + request = vmwareengine.UpdateDnsForwardingRequest() - request.parent = "parent_value" + request.dns_forwarding.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_private_connection_peering_routes(request) + await client.update_dns_forwarding(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12002,37 +12356,41 @@ async def test_list_private_connection_peering_routes_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "dns_forwarding.name=name_value", ) in kw["metadata"] -def test_list_private_connection_peering_routes_flattened(): +def test_update_dns_forwarding_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_private_connection_peering_routes( - parent="parent_value", + client.update_dns_forwarding( + dns_forwarding=vmwareengine_resources.DnsForwarding(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].dns_forwarding + mock_val = vmwareengine_resources.DnsForwarding(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_private_connection_peering_routes_flattened_error(): +def test_update_dns_forwarding_flattened_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12040,45 +12398,50 @@ def test_list_private_connection_peering_routes_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_private_connection_peering_routes( - vmwareengine.ListPrivateConnectionPeeringRoutesRequest(), - parent="parent_value", + client.update_dns_forwarding( + vmwareengine.UpdateDnsForwardingRequest(), + dns_forwarding=vmwareengine_resources.DnsForwarding(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_private_connection_peering_routes_flattened_async(): +async def test_update_dns_forwarding_flattened_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.update_dns_forwarding), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_private_connection_peering_routes( - parent="parent_value", + response = await client.update_dns_forwarding( + dns_forwarding=vmwareengine_resources.DnsForwarding(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].dns_forwarding + mock_val = vmwareengine_resources.DnsForwarding(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_private_connection_peering_routes_flattened_error_async(): +async def test_update_dns_forwarding_flattened_error_async(): client = VmwareEngineAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12086,261 +12449,22076 @@ async def test_list_private_connection_peering_routes_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_private_connection_peering_routes( - vmwareengine.ListPrivateConnectionPeeringRoutesRequest(), - parent="parent_value", + await client.update_dns_forwarding( + vmwareengine.UpdateDnsForwardingRequest(), + dns_forwarding=vmwareengine_resources.DnsForwarding(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_private_connection_peering_routes_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetNetworkPeeringRequest, + dict, + ], +) +def test_get_network_peering(request_type, transport: str = "grpc"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.get_network_peering), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.NetworkPeering( + name="name_value", + peer_network="peer_network_value", + export_custom_routes=True, + import_custom_routes=True, + exchange_subnet_routes=True, + export_custom_routes_with_public_ip=True, + import_custom_routes_with_public_ip=True, + state=vmwareengine_resources.NetworkPeering.State.INACTIVE, + state_details="state_details_value", + peer_mtu=865, + peer_network_type=vmwareengine_resources.NetworkPeering.PeerNetworkType.STANDARD, + uid="uid_value", + vmware_engine_network="vmware_engine_network_value", + description="description_value", ) - pager = client.list_private_connection_peering_routes(request={}) + response = client.get_network_peering(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetNetworkPeeringRequest() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.PeeringRoute) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.NetworkPeering) + assert response.name == "name_value" + assert response.peer_network == "peer_network_value" + assert response.export_custom_routes is True + assert response.import_custom_routes is True + assert response.exchange_subnet_routes is True + assert response.export_custom_routes_with_public_ip is True + assert response.import_custom_routes_with_public_ip is True + assert response.state == vmwareengine_resources.NetworkPeering.State.INACTIVE + assert response.state_details == "state_details_value" + assert response.peer_mtu == 865 + assert ( + response.peer_network_type + == vmwareengine_resources.NetworkPeering.PeerNetworkType.STANDARD + ) + assert response.uid == "uid_value" + assert response.vmware_engine_network == "vmware_engine_network_value" + assert response.description == "description_value" -def test_list_private_connection_peering_routes_pages(transport_name: str = "grpc"): +def test_get_network_peering_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), "__call__" + type(client.transport.get_network_peering), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - ), - RuntimeError, - ) - pages = list(client.list_private_connection_peering_routes(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.get_network_peering() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetNetworkPeeringRequest() @pytest.mark.asyncio -async def test_list_private_connection_peering_routes_async_pager(): +async def test_get_network_peering_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetNetworkPeeringRequest +): client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.get_network_peering), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_private_connection_peering_routes( - request={}, + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.NetworkPeering( + name="name_value", + peer_network="peer_network_value", + export_custom_routes=True, + import_custom_routes=True, + exchange_subnet_routes=True, + export_custom_routes_with_public_ip=True, + import_custom_routes_with_public_ip=True, + state=vmwareengine_resources.NetworkPeering.State.INACTIVE, + state_details="state_details_value", + peer_mtu=865, + peer_network_type=vmwareengine_resources.NetworkPeering.PeerNetworkType.STANDARD, + uid="uid_value", + vmware_engine_network="vmware_engine_network_value", + description="description_value", + ) ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) + response = await client.get_network_peering(request) - assert len(responses) == 6 + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetNetworkPeeringRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.NetworkPeering) + assert response.name == "name_value" + assert response.peer_network == "peer_network_value" + assert response.export_custom_routes is True + assert response.import_custom_routes is True + assert response.exchange_subnet_routes is True + assert response.export_custom_routes_with_public_ip is True + assert response.import_custom_routes_with_public_ip is True + assert response.state == vmwareengine_resources.NetworkPeering.State.INACTIVE + assert response.state_details == "state_details_value" + assert response.peer_mtu == 865 + assert ( + response.peer_network_type + == vmwareengine_resources.NetworkPeering.PeerNetworkType.STANDARD + ) + assert response.uid == "uid_value" + assert response.vmware_engine_network == "vmware_engine_network_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_network_peering_async_from_dict(): + await test_get_network_peering_async(request_type=dict) + + +def test_get_network_peering_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetNetworkPeeringRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_peering), "__call__" + ) as call: + call.return_value = vmwareengine_resources.NetworkPeering() + client.get_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_network_peering_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetNetworkPeeringRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_peering), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.NetworkPeering() + ) + await client.get_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_network_peering_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.NetworkPeering() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_network_peering( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_network_peering_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_network_peering( + vmwareengine.GetNetworkPeeringRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_network_peering_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.NetworkPeering() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.NetworkPeering() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_network_peering( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_network_peering_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_network_peering( + vmwareengine.GetNetworkPeeringRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListNetworkPeeringsRequest, + dict, + ], +) +def test_list_network_peerings(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListNetworkPeeringsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_network_peerings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListNetworkPeeringsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNetworkPeeringsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_network_peerings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + client.list_network_peerings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListNetworkPeeringsRequest() + + +@pytest.mark.asyncio +async def test_list_network_peerings_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListNetworkPeeringsRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListNetworkPeeringsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_network_peerings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListNetworkPeeringsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNetworkPeeringsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_network_peerings_async_from_dict(): + await test_list_network_peerings_async(request_type=dict) + + +def test_list_network_peerings_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = vmwareengine.ListNetworkPeeringsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + call.return_value = vmwareengine.ListNetworkPeeringsResponse() + client.list_network_peerings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_network_peerings_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListNetworkPeeringsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListNetworkPeeringsResponse() + ) + await client.list_network_peerings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_network_peerings_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListNetworkPeeringsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_network_peerings( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_network_peerings_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_network_peerings( + vmwareengine.ListNetworkPeeringsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_network_peerings_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListNetworkPeeringsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListNetworkPeeringsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_network_peerings( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_network_peerings_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_network_peerings( + vmwareengine.ListNetworkPeeringsRequest(), + parent="parent_value", + ) + + +def test_list_network_peerings_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_network_peerings(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.NetworkPeering) for i in results + ) + + +def 
test_list_network_peerings_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + ), + RuntimeError, + ) + pages = list(client.list_network_peerings(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_network_peerings_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_network_peerings( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.NetworkPeering) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_network_peerings_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_peerings), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_network_peerings(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateNetworkPeeringRequest, + dict, + ], +) +def test_create_network_peering(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_network_peering(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateNetworkPeeringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_network_peering_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + client.create_network_peering() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateNetworkPeeringRequest() + + +@pytest.mark.asyncio +async def test_create_network_peering_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateNetworkPeeringRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateNetworkPeeringRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_network_peering_async_from_dict(): + await test_create_network_peering_async(request_type=dict) + + +def test_create_network_peering_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateNetworkPeeringRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_network_peering_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateNetworkPeeringRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_network_peering(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_network_peering_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_network_peering( + parent="parent_value", + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + network_peering_id="network_peering_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].network_peering + mock_val = vmwareengine_resources.NetworkPeering(name="name_value") + assert arg == mock_val + arg = args[0].network_peering_id + mock_val = "network_peering_id_value" + assert arg == mock_val + + +def test_create_network_peering_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_network_peering( + vmwareengine.CreateNetworkPeeringRequest(), + parent="parent_value", + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + network_peering_id="network_peering_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_network_peering_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_network_peering( + parent="parent_value", + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + network_peering_id="network_peering_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].network_peering + mock_val = vmwareengine_resources.NetworkPeering(name="name_value") + assert arg == mock_val + arg = args[0].network_peering_id + mock_val = "network_peering_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_network_peering_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_network_peering( + vmwareengine.CreateNetworkPeeringRequest(), + parent="parent_value", + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + network_peering_id="network_peering_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteNetworkPeeringRequest, + dict, + ], +) +def test_delete_network_peering(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteNetworkPeeringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_network_peering_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + client.delete_network_peering() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteNetworkPeeringRequest() + + +@pytest.mark.asyncio +async def test_delete_network_peering_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.DeleteNetworkPeeringRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteNetworkPeeringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_network_peering_async_from_dict(): + await test_delete_network_peering_async(request_type=dict) + + +def test_delete_network_peering_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteNetworkPeeringRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_network_peering_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteNetworkPeeringRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_network_peering_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_network_peering( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_network_peering_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_network_peering( + vmwareengine.DeleteNetworkPeeringRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_network_peering_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_network_peering( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_network_peering_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_network_peering( + vmwareengine.DeleteNetworkPeeringRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateNetworkPeeringRequest, + dict, + ], +) +def test_update_network_peering(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateNetworkPeeringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_network_peering_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + client.update_network_peering() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateNetworkPeeringRequest() + + +@pytest.mark.asyncio +async def test_update_network_peering_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.UpdateNetworkPeeringRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateNetworkPeeringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_network_peering_async_from_dict(): + await test_update_network_peering_async(request_type=dict) + + +def test_update_network_peering_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.UpdateNetworkPeeringRequest() + + request.network_peering.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "network_peering.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_network_peering_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateNetworkPeeringRequest() + + request.network_peering.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_network_peering(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "network_peering.name=name_value", + ) in kw["metadata"] + + +def test_update_network_peering_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_network_peering( + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].network_peering + mock_val = vmwareengine_resources.NetworkPeering(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_network_peering_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_network_peering( + vmwareengine.UpdateNetworkPeeringRequest(), + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_network_peering_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_peering), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_network_peering( + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].network_peering + mock_val = vmwareengine_resources.NetworkPeering(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_network_peering_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_network_peering( + vmwareengine.UpdateNetworkPeeringRequest(), + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListPeeringRoutesRequest, + dict, + ], +) +def test_list_peering_routes(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPeeringRoutesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPeeringRoutesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPeeringRoutesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_peering_routes_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + client.list_peering_routes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPeeringRoutesRequest() + + +@pytest.mark.asyncio +async def test_list_peering_routes_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.ListPeeringRoutesRequest +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPeeringRoutesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPeeringRoutesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPeeringRoutesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_peering_routes_async_from_dict(): + await test_list_peering_routes_async(request_type=dict) + + +def test_list_peering_routes_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListPeeringRoutesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + call.return_value = vmwareengine.ListPeeringRoutesResponse() + client.list_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_peering_routes_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.ListPeeringRoutesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPeeringRoutesResponse() + ) + await client.list_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_peering_routes_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPeeringRoutesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_peering_routes( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_peering_routes_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_peering_routes( + vmwareengine.ListPeeringRoutesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_peering_routes_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPeeringRoutesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPeeringRoutesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_peering_routes( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_peering_routes_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_peering_routes( + vmwareengine.ListPeeringRoutesRequest(), + parent="parent_value", + ) + + +def test_list_peering_routes_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_peering_routes(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.PeeringRoute) for i in results) + + +def test_list_peering_routes_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + pages = list(client.list_peering_routes(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_peering_routes_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_peering_routes( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.PeeringRoute) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_peering_routes_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_peering_routes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_peering_routes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateHcxActivationKeyRequest, + dict, + ], +) +def test_create_hcx_activation_key(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateHcxActivationKeyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_hcx_activation_key_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + client.create_hcx_activation_key() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateHcxActivationKeyRequest() + + +@pytest.mark.asyncio +async def test_create_hcx_activation_key_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateHcxActivationKeyRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateHcxActivationKeyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_hcx_activation_key_async_from_dict(): + await test_create_hcx_activation_key_async(request_type=dict) + + +def test_create_hcx_activation_key_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateHcxActivationKeyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_hcx_activation_key_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateHcxActivationKeyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_hcx_activation_key_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_hcx_activation_key( + parent="parent_value", + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), + hcx_activation_key_id="hcx_activation_key_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].hcx_activation_key + mock_val = vmwareengine_resources.HcxActivationKey(name="name_value") + assert arg == mock_val + arg = args[0].hcx_activation_key_id + mock_val = "hcx_activation_key_id_value" + assert arg == mock_val + + +def test_create_hcx_activation_key_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_hcx_activation_key( + vmwareengine.CreateHcxActivationKeyRequest(), + parent="parent_value", + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), + hcx_activation_key_id="hcx_activation_key_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_hcx_activation_key_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_hcx_activation_key( + parent="parent_value", + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), + hcx_activation_key_id="hcx_activation_key_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].hcx_activation_key + mock_val = vmwareengine_resources.HcxActivationKey(name="name_value") + assert arg == mock_val + arg = args[0].hcx_activation_key_id + mock_val = "hcx_activation_key_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_hcx_activation_key_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_hcx_activation_key( + vmwareengine.CreateHcxActivationKeyRequest(), + parent="parent_value", + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), + hcx_activation_key_id="hcx_activation_key_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListHcxActivationKeysRequest, + dict, + ], +) +def test_list_hcx_activation_keys(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListHcxActivationKeysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_hcx_activation_keys(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListHcxActivationKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHcxActivationKeysPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_hcx_activation_keys_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + client.list_hcx_activation_keys() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListHcxActivationKeysRequest() + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListHcxActivationKeysRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListHcxActivationKeysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_hcx_activation_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListHcxActivationKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListHcxActivationKeysAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_async_from_dict(): + await test_list_hcx_activation_keys_async(request_type=dict) + + +def test_list_hcx_activation_keys_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListHcxActivationKeysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + call.return_value = vmwareengine.ListHcxActivationKeysResponse() + client.list_hcx_activation_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListHcxActivationKeysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListHcxActivationKeysResponse() + ) + await client.list_hcx_activation_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_hcx_activation_keys_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListHcxActivationKeysResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_hcx_activation_keys( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_hcx_activation_keys_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_hcx_activation_keys( + vmwareengine.ListHcxActivationKeysRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListHcxActivationKeysResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListHcxActivationKeysResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_hcx_activation_keys( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_hcx_activation_keys( + vmwareengine.ListHcxActivationKeysRequest(), + parent="parent_value", + ) + + +def test_list_hcx_activation_keys_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="abc", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[], + next_page_token="def", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="ghi", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_hcx_activation_keys(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.HcxActivationKey) for i in results + ) + + +def test_list_hcx_activation_keys_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_hcx_activation_keys), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="abc", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[], + next_page_token="def", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="ghi", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + ), + RuntimeError, + ) + pages = list(client.list_hcx_activation_keys(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="abc", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[], + next_page_token="def", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="ghi", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_hcx_activation_keys( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.HcxActivationKey) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_hcx_activation_keys_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_hcx_activation_keys), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="abc", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[], + next_page_token="def", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="ghi", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_hcx_activation_keys(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetHcxActivationKeyRequest, + dict, + ], +) +def test_get_hcx_activation_key(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vmwareengine_resources.HcxActivationKey( + name="name_value", + state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, + activation_key="activation_key_value", + uid="uid_value", + ) + response = client.get_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetHcxActivationKeyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.HcxActivationKey) + assert response.name == "name_value" + assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE + assert response.activation_key == "activation_key_value" + assert response.uid == "uid_value" + + +def test_get_hcx_activation_key_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + client.get_hcx_activation_key() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetHcxActivationKeyRequest() + + +@pytest.mark.asyncio +async def test_get_hcx_activation_key_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.GetHcxActivationKeyRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.HcxActivationKey( + name="name_value", + state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, + activation_key="activation_key_value", + uid="uid_value", + ) + ) + response = await client.get_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetHcxActivationKeyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.HcxActivationKey) + assert response.name == "name_value" + assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE + assert response.activation_key == "activation_key_value" + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_get_hcx_activation_key_async_from_dict(): + await test_get_hcx_activation_key_async(request_type=dict) + + +def test_get_hcx_activation_key_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetHcxActivationKeyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + call.return_value = vmwareengine_resources.HcxActivationKey() + client.get_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_hcx_activation_key_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetHcxActivationKeyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.HcxActivationKey() + ) + await client.get_hcx_activation_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_hcx_activation_key_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.HcxActivationKey() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_hcx_activation_key( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_hcx_activation_key_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_hcx_activation_key( + vmwareengine.GetHcxActivationKeyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_hcx_activation_key_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_hcx_activation_key), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.HcxActivationKey() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.HcxActivationKey() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_hcx_activation_key( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_hcx_activation_key_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_hcx_activation_key( + vmwareengine.GetHcxActivationKeyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetNetworkPolicyRequest, + dict, + ], +) +def test_get_network_policy(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.NetworkPolicy( + name="name_value", + edge_services_cidr="edge_services_cidr_value", + uid="uid_value", + vmware_engine_network="vmware_engine_network_value", + description="description_value", + vmware_engine_network_canonical="vmware_engine_network_canonical_value", + ) + response = client.get_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetNetworkPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.NetworkPolicy) + assert response.name == "name_value" + assert response.edge_services_cidr == "edge_services_cidr_value" + assert response.uid == "uid_value" + assert response.vmware_engine_network == "vmware_engine_network_value" + assert response.description == "description_value" + assert ( + response.vmware_engine_network_canonical + == "vmware_engine_network_canonical_value" + ) + + +def test_get_network_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + client.get_network_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetNetworkPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_network_policy_async( + transport: str = "grpc_asyncio", request_type=vmwareengine.GetNetworkPolicyRequest +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.NetworkPolicy( + name="name_value", + edge_services_cidr="edge_services_cidr_value", + uid="uid_value", + vmware_engine_network="vmware_engine_network_value", + description="description_value", + vmware_engine_network_canonical="vmware_engine_network_canonical_value", + ) + ) + response = await client.get_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.NetworkPolicy) + assert response.name == "name_value" + assert response.edge_services_cidr == "edge_services_cidr_value" + assert response.uid == "uid_value" + assert response.vmware_engine_network == "vmware_engine_network_value" + assert response.description == "description_value" + assert ( + response.vmware_engine_network_canonical + == "vmware_engine_network_canonical_value" + ) + + +@pytest.mark.asyncio +async def test_get_network_policy_async_from_dict(): + await test_get_network_policy_async(request_type=dict) + + +def test_get_network_policy_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + call.return_value = vmwareengine_resources.NetworkPolicy() + client.get_network_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_network_policy_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.NetworkPolicy() + ) + await client.get_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_network_policy_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.NetworkPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_network_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_network_policy_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_network_policy( + vmwareengine.GetNetworkPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_network_policy_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.NetworkPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.NetworkPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_network_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_network_policy_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_network_policy( + vmwareengine.GetNetworkPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListNetworkPoliciesRequest, + dict, + ], +) +def test_list_network_policies(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListNetworkPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_network_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListNetworkPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNetworkPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_network_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + client.list_network_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListNetworkPoliciesRequest() + + +@pytest.mark.asyncio +async def test_list_network_policies_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListNetworkPoliciesRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListNetworkPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_network_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListNetworkPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNetworkPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_network_policies_async_from_dict(): + await test_list_network_policies_async(request_type=dict) + + +def test_list_network_policies_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = vmwareengine.ListNetworkPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + call.return_value = vmwareengine.ListNetworkPoliciesResponse() + client.list_network_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_network_policies_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListNetworkPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListNetworkPoliciesResponse() + ) + await client.list_network_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_network_policies_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListNetworkPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_network_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_network_policies_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_network_policies( + vmwareengine.ListNetworkPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_network_policies_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListNetworkPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListNetworkPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_network_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_network_policies_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_network_policies( + vmwareengine.ListNetworkPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_network_policies_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_network_policies(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.NetworkPolicy) for i in results) + + +def 
test_list_network_policies_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_network_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_network_policies_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_network_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.NetworkPolicy) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_network_policies_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_network_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateNetworkPolicyRequest, + dict, + ], +) +def test_create_network_policy(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_network_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_network_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + client.create_network_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateNetworkPolicyRequest() + + +@pytest.mark.asyncio +async def test_create_network_policy_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateNetworkPolicyRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateNetworkPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_network_policy_async_from_dict(): + await test_create_network_policy_async(request_type=dict) + + +def test_create_network_policy_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateNetworkPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_network_policy_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateNetworkPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_network_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_network_policy_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_network_policy( + parent="parent_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + network_policy_id="network_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].network_policy + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") + assert arg == mock_val + arg = args[0].network_policy_id + mock_val = "network_policy_id_value" + assert arg == mock_val + + +def test_create_network_policy_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_network_policy( + vmwareengine.CreateNetworkPolicyRequest(), + parent="parent_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + network_policy_id="network_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_network_policy_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_network_policy( + parent="parent_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + network_policy_id="network_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].network_policy + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") + assert arg == mock_val + arg = args[0].network_policy_id + mock_val = "network_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_network_policy_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_network_policy( + vmwareengine.CreateNetworkPolicyRequest(), + parent="parent_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + network_policy_id="network_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateNetworkPolicyRequest, + dict, + ], +) +def test_update_network_policy(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_network_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + client.update_network_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateNetworkPolicyRequest() + + +@pytest.mark.asyncio +async def test_update_network_policy_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.UpdateNetworkPolicyRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_network_policy_async_from_dict(): + await test_update_network_policy_async(request_type=dict) + + +def test_update_network_policy_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateNetworkPolicyRequest() + + request.network_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "network_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_network_policy_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateNetworkPolicyRequest() + + request.network_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "network_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_network_policy_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_network_policy( + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].network_policy + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_network_policy_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_network_policy( + vmwareengine.UpdateNetworkPolicyRequest(), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_network_policy_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_network_policy( + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].network_policy + mock_val = vmwareengine_resources.NetworkPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_network_policy_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_network_policy( + vmwareengine.UpdateNetworkPolicyRequest(), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteNetworkPolicyRequest, + dict, + ], +) +def test_delete_network_policy(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_network_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + client.delete_network_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteNetworkPolicyRequest() + + +@pytest.mark.asyncio +async def test_delete_network_policy_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.DeleteNetworkPolicyRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_network_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_network_policy_async_from_dict(): + await test_delete_network_policy_async(request_type=dict) + + +def test_delete_network_policy_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_network_policy_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_network_policy_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_network_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_network_policy_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_network_policy( + vmwareengine.DeleteNetworkPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_network_policy_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_network_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_network_policy_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_network_policy( + vmwareengine.DeleteNetworkPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListManagementDnsZoneBindingsRequest, + dict, + ], +) +def test_list_management_dns_zone_bindings(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListManagementDnsZoneBindingsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_management_dns_zone_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListManagementDnsZoneBindingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListManagementDnsZoneBindingsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_management_dns_zone_bindings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + client.list_management_dns_zone_bindings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListManagementDnsZoneBindingsRequest() + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListManagementDnsZoneBindingsRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListManagementDnsZoneBindingsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_management_dns_zone_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListManagementDnsZoneBindingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagementDnsZoneBindingsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_async_from_dict(): + await test_list_management_dns_zone_bindings_async(request_type=dict) + + +def test_list_management_dns_zone_bindings_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListManagementDnsZoneBindingsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + call.return_value = vmwareengine.ListManagementDnsZoneBindingsResponse() + client.list_management_dns_zone_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListManagementDnsZoneBindingsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListManagementDnsZoneBindingsResponse() + ) + await client.list_management_dns_zone_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_management_dns_zone_bindings_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListManagementDnsZoneBindingsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_management_dns_zone_bindings( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_management_dns_zone_bindings_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_management_dns_zone_bindings( + vmwareengine.ListManagementDnsZoneBindingsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListManagementDnsZoneBindingsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListManagementDnsZoneBindingsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_management_dns_zone_bindings( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_management_dns_zone_bindings( + vmwareengine.ListManagementDnsZoneBindingsRequest(), + parent="parent_value", + ) + + +def test_list_management_dns_zone_bindings_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="abc", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[], + next_page_token="def", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="ghi", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_management_dns_zone_bindings(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.ManagementDnsZoneBinding) + for i in results + ) + + +def test_list_management_dns_zone_bindings_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="abc", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[], + next_page_token="def", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="ghi", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + ), + RuntimeError, + ) + pages = list(client.list_management_dns_zone_bindings(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="abc", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[], + next_page_token="def", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="ghi", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_management_dns_zone_bindings( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.ManagementDnsZoneBinding) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_management_dns_zone_bindings_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_management_dns_zone_bindings), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="abc", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[], + next_page_token="def", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="ghi", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_management_dns_zone_bindings(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetManagementDnsZoneBindingRequest, + dict, + ], +) +def test_get_management_dns_zone_binding(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value", + state=vmwareengine_resources.ManagementDnsZoneBinding.State.ACTIVE, + description="description_value", + uid="uid_value", + vpc_network="vpc_network_value", + ) + response = client.get_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.ManagementDnsZoneBinding) + assert response.name == "name_value" + assert ( + response.state == vmwareengine_resources.ManagementDnsZoneBinding.State.ACTIVE + ) + assert response.description == "description_value" + assert response.uid == "uid_value" + + +def test_get_management_dns_zone_binding_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + client.get_management_dns_zone_binding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetManagementDnsZoneBindingRequest() + + +@pytest.mark.asyncio +async def test_get_management_dns_zone_binding_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.GetManagementDnsZoneBindingRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value", + state=vmwareengine_resources.ManagementDnsZoneBinding.State.ACTIVE, + description="description_value", + uid="uid_value", + ) + ) + response = await client.get_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.ManagementDnsZoneBinding) + assert response.name == "name_value" + assert ( + response.state == vmwareengine_resources.ManagementDnsZoneBinding.State.ACTIVE + ) + assert response.description == "description_value" + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_get_management_dns_zone_binding_async_from_dict(): + await test_get_management_dns_zone_binding_async(request_type=dict) + + +def test_get_management_dns_zone_binding_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetManagementDnsZoneBindingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = vmwareengine_resources.ManagementDnsZoneBinding() + client.get_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_management_dns_zone_binding_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetManagementDnsZoneBindingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.ManagementDnsZoneBinding() + ) + await client.get_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_management_dns_zone_binding_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.ManagementDnsZoneBinding() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_management_dns_zone_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_management_dns_zone_binding_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_management_dns_zone_binding( + vmwareengine.GetManagementDnsZoneBindingRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_management_dns_zone_binding_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.ManagementDnsZoneBinding() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.ManagementDnsZoneBinding() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_management_dns_zone_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_management_dns_zone_binding_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_management_dns_zone_binding( + vmwareengine.GetManagementDnsZoneBindingRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateManagementDnsZoneBindingRequest, + dict, + ], +) +def test_create_management_dns_zone_binding(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_management_dns_zone_binding_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + client.create_management_dns_zone_binding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateManagementDnsZoneBindingRequest() + + +@pytest.mark.asyncio +async def test_create_management_dns_zone_binding_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateManagementDnsZoneBindingRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_management_dns_zone_binding_async_from_dict(): + await test_create_management_dns_zone_binding_async(request_type=dict) + + +def test_create_management_dns_zone_binding_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateManagementDnsZoneBindingRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_management_dns_zone_binding_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateManagementDnsZoneBindingRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_management_dns_zone_binding_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_management_dns_zone_binding( + parent="parent_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].management_dns_zone_binding + mock_val = vmwareengine_resources.ManagementDnsZoneBinding(name="name_value") + assert arg == mock_val + arg = args[0].management_dns_zone_binding_id + mock_val = "management_dns_zone_binding_id_value" + assert arg == mock_val + + +def test_create_management_dns_zone_binding_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_management_dns_zone_binding( + vmwareengine.CreateManagementDnsZoneBindingRequest(), + parent="parent_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_management_dns_zone_binding_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_management_dns_zone_binding( + parent="parent_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].management_dns_zone_binding + mock_val = vmwareengine_resources.ManagementDnsZoneBinding(name="name_value") + assert arg == mock_val + arg = args[0].management_dns_zone_binding_id + mock_val = "management_dns_zone_binding_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_management_dns_zone_binding_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_management_dns_zone_binding( + vmwareengine.CreateManagementDnsZoneBindingRequest(), + parent="parent_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + management_dns_zone_binding_id="management_dns_zone_binding_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateManagementDnsZoneBindingRequest, + dict, + ], +) +def test_update_management_dns_zone_binding(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_management_dns_zone_binding_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + client.update_management_dns_zone_binding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateManagementDnsZoneBindingRequest() + + +@pytest.mark.asyncio +async def test_update_management_dns_zone_binding_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.UpdateManagementDnsZoneBindingRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_management_dns_zone_binding_async_from_dict(): + await test_update_management_dns_zone_binding_async(request_type=dict) + + +def test_update_management_dns_zone_binding_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateManagementDnsZoneBindingRequest() + + request.management_dns_zone_binding.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "management_dns_zone_binding.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_management_dns_zone_binding_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateManagementDnsZoneBindingRequest() + + request.management_dns_zone_binding.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "management_dns_zone_binding.name=name_value", + ) in kw["metadata"] + + +def test_update_management_dns_zone_binding_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_management_dns_zone_binding( + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].management_dns_zone_binding + mock_val = vmwareengine_resources.ManagementDnsZoneBinding(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_management_dns_zone_binding_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_management_dns_zone_binding( + vmwareengine.UpdateManagementDnsZoneBindingRequest(), + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_management_dns_zone_binding_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_management_dns_zone_binding( + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].management_dns_zone_binding + mock_val = vmwareengine_resources.ManagementDnsZoneBinding(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_management_dns_zone_binding_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_management_dns_zone_binding( + vmwareengine.UpdateManagementDnsZoneBindingRequest(), + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteManagementDnsZoneBindingRequest, + dict, + ], +) +def test_delete_management_dns_zone_binding(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_management_dns_zone_binding_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + client.delete_management_dns_zone_binding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteManagementDnsZoneBindingRequest() + + +@pytest.mark.asyncio +async def test_delete_management_dns_zone_binding_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.DeleteManagementDnsZoneBindingRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_management_dns_zone_binding_async_from_dict(): + await test_delete_management_dns_zone_binding_async(request_type=dict) + + +def test_delete_management_dns_zone_binding_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteManagementDnsZoneBindingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_management_dns_zone_binding_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.DeleteManagementDnsZoneBindingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_management_dns_zone_binding_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_management_dns_zone_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_management_dns_zone_binding_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_management_dns_zone_binding( + vmwareengine.DeleteManagementDnsZoneBindingRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_management_dns_zone_binding_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_management_dns_zone_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_management_dns_zone_binding_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_management_dns_zone_binding( + vmwareengine.DeleteManagementDnsZoneBindingRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.RepairManagementDnsZoneBindingRequest, + dict, + ], +) +def test_repair_management_dns_zone_binding(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.repair_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.RepairManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_repair_management_dns_zone_binding_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + client.repair_management_dns_zone_binding() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.RepairManagementDnsZoneBindingRequest() + + +@pytest.mark.asyncio +async def test_repair_management_dns_zone_binding_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.RepairManagementDnsZoneBindingRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.repair_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.RepairManagementDnsZoneBindingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_repair_management_dns_zone_binding_async_from_dict(): + await test_repair_management_dns_zone_binding_async(request_type=dict) + + +def test_repair_management_dns_zone_binding_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.RepairManagementDnsZoneBindingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.repair_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_repair_management_dns_zone_binding_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.RepairManagementDnsZoneBindingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.repair_management_dns_zone_binding(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_repair_management_dns_zone_binding_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.repair_management_dns_zone_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_repair_management_dns_zone_binding_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.repair_management_dns_zone_binding( + vmwareengine.RepairManagementDnsZoneBindingRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_repair_management_dns_zone_binding_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.repair_management_dns_zone_binding), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.repair_management_dns_zone_binding( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_repair_management_dns_zone_binding_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.repair_management_dns_zone_binding( + vmwareengine.RepairManagementDnsZoneBindingRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateVmwareEngineNetworkRequest, + dict, + ], +) +def test_create_vmware_engine_network(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_vmware_engine_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + client.create_vmware_engine_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateVmwareEngineNetworkRequest() + + +@pytest.mark.asyncio +async def test_create_vmware_engine_network_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreateVmwareEngineNetworkRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreateVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_vmware_engine_network_async_from_dict(): + await test_create_vmware_engine_network_async(request_type=dict) + + +def test_create_vmware_engine_network_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateVmwareEngineNetworkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_vmware_engine_network_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreateVmwareEngineNetworkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_vmware_engine_network_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_vmware_engine_network( + parent="parent_value", + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + vmware_engine_network_id="vmware_engine_network_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].vmware_engine_network + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + assert arg == mock_val + arg = args[0].vmware_engine_network_id + mock_val = "vmware_engine_network_id_value" + assert arg == mock_val + + +def test_create_vmware_engine_network_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_vmware_engine_network( + vmwareengine.CreateVmwareEngineNetworkRequest(), + parent="parent_value", + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + vmware_engine_network_id="vmware_engine_network_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_vmware_engine_network_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_vmware_engine_network( + parent="parent_value", + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + vmware_engine_network_id="vmware_engine_network_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].vmware_engine_network + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + assert arg == mock_val + arg = args[0].vmware_engine_network_id + mock_val = "vmware_engine_network_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_vmware_engine_network_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_vmware_engine_network( + vmwareengine.CreateVmwareEngineNetworkRequest(), + parent="parent_value", + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + vmware_engine_network_id="vmware_engine_network_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateVmwareEngineNetworkRequest, + dict, + ], +) +def test_update_vmware_engine_network(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_vmware_engine_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + client.update_vmware_engine_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateVmwareEngineNetworkRequest() + + +@pytest.mark.asyncio +async def test_update_vmware_engine_network_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.UpdateVmwareEngineNetworkRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdateVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_vmware_engine_network_async_from_dict(): + await test_update_vmware_engine_network_async(request_type=dict) + + +def test_update_vmware_engine_network_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateVmwareEngineNetworkRequest() + + request.vmware_engine_network.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vmware_engine_network.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_vmware_engine_network_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdateVmwareEngineNetworkRequest() + + request.vmware_engine_network.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vmware_engine_network.name=name_value", + ) in kw["metadata"] + + +def test_update_vmware_engine_network_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_vmware_engine_network( + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].vmware_engine_network + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_vmware_engine_network_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_vmware_engine_network( + vmwareengine.UpdateVmwareEngineNetworkRequest(), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_vmware_engine_network_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_vmware_engine_network( + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].vmware_engine_network + mock_val = vmwareengine_resources.VmwareEngineNetwork(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_vmware_engine_network_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_vmware_engine_network( + vmwareengine.UpdateVmwareEngineNetworkRequest(), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteVmwareEngineNetworkRequest, + dict, + ], +) +def test_delete_vmware_engine_network(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_vmware_engine_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + client.delete_vmware_engine_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteVmwareEngineNetworkRequest() + + +@pytest.mark.asyncio +async def test_delete_vmware_engine_network_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.DeleteVmwareEngineNetworkRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeleteVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_vmware_engine_network_async_from_dict(): + await test_delete_vmware_engine_network_async(request_type=dict) + + +def test_delete_vmware_engine_network_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteVmwareEngineNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_vmware_engine_network_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeleteVmwareEngineNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_vmware_engine_network_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_vmware_engine_network( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_vmware_engine_network_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_vmware_engine_network( + vmwareengine.DeleteVmwareEngineNetworkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_vmware_engine_network_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_vmware_engine_network( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_vmware_engine_network_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_vmware_engine_network( + vmwareengine.DeleteVmwareEngineNetworkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetVmwareEngineNetworkRequest, + dict, + ], +) +def test_get_vmware_engine_network(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.VmwareEngineNetwork( + name="name_value", + description="description_value", + state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, + type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, + uid="uid_value", + etag="etag_value", + ) + response = client.get_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING + assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY + assert response.uid == "uid_value" + assert response.etag == "etag_value" + + +def test_get_vmware_engine_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + client.get_vmware_engine_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() + + +@pytest.mark.asyncio +async def test_get_vmware_engine_network_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.GetVmwareEngineNetworkRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.VmwareEngineNetwork( + name="name_value", + description="description_value", + state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, + type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, + uid="uid_value", + etag="etag_value", + ) + ) + response = await client.get_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetVmwareEngineNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING + assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY + assert response.uid == "uid_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_vmware_engine_network_async_from_dict(): + await test_get_vmware_engine_network_async(request_type=dict) + + +def test_get_vmware_engine_network_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetVmwareEngineNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + call.return_value = vmwareengine_resources.VmwareEngineNetwork() + client.get_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_vmware_engine_network_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetVmwareEngineNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.VmwareEngineNetwork() + ) + await client.get_vmware_engine_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_vmware_engine_network_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.VmwareEngineNetwork() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_vmware_engine_network( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_vmware_engine_network_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_vmware_engine_network( + vmwareengine.GetVmwareEngineNetworkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_vmware_engine_network_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vmware_engine_network), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.VmwareEngineNetwork() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.VmwareEngineNetwork() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_vmware_engine_network( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_vmware_engine_network_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_vmware_engine_network( + vmwareengine.GetVmwareEngineNetworkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListVmwareEngineNetworksRequest, + dict, + ], +) +def test_list_vmware_engine_networks(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListVmwareEngineNetworksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_vmware_engine_networks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListVmwareEngineNetworksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVmwareEngineNetworksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_vmware_engine_networks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + client.list_vmware_engine_networks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListVmwareEngineNetworksRequest() + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListVmwareEngineNetworksRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListVmwareEngineNetworksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_vmware_engine_networks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListVmwareEngineNetworksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVmwareEngineNetworksAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_async_from_dict(): + await test_list_vmware_engine_networks_async(request_type=dict) + + +def test_list_vmware_engine_networks_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListVmwareEngineNetworksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + call.return_value = vmwareengine.ListVmwareEngineNetworksResponse() + client.list_vmware_engine_networks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListVmwareEngineNetworksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListVmwareEngineNetworksResponse() + ) + await client.list_vmware_engine_networks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_vmware_engine_networks_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListVmwareEngineNetworksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_vmware_engine_networks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_vmware_engine_networks_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_vmware_engine_networks( + vmwareengine.ListVmwareEngineNetworksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListVmwareEngineNetworksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListVmwareEngineNetworksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_vmware_engine_networks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_vmware_engine_networks( + vmwareengine.ListVmwareEngineNetworksRequest(), + parent="parent_value", + ) + + +def test_list_vmware_engine_networks_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="abc", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[], + next_page_token="def", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="ghi", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_vmware_engine_networks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in results + ) + + +def test_list_vmware_engine_networks_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="abc", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[], + next_page_token="def", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="ghi", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + ), + RuntimeError, + ) + pages = list(client.list_vmware_engine_networks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="abc", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[], + next_page_token="def", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="ghi", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_vmware_engine_networks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_vmware_engine_networks_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vmware_engine_networks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="abc", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[], + next_page_token="def", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="ghi", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_vmware_engine_networks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreatePrivateConnectionRequest, + dict, + ], +) +def test_create_private_connection(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreatePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + client.create_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreatePrivateConnectionRequest() + + +@pytest.mark.asyncio +async def test_create_private_connection_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.CreatePrivateConnectionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.CreatePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_private_connection_async_from_dict(): + await test_create_private_connection_async(request_type=dict) + + +def test_create_private_connection_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.CreatePrivateConnectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_private_connection_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.CreatePrivateConnectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_private_connection_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_private_connection( + parent="parent_value", + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].private_connection + mock_val = vmwareengine_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = "private_connection_id_value" + assert arg == mock_val + + +def test_create_private_connection_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_private_connection( + vmwareengine.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_private_connection_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_private_connection( + parent="parent_value", + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].private_connection + mock_val = vmwareengine_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = "private_connection_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_private_connection_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_private_connection( + vmwareengine.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetPrivateConnectionRequest, + dict, + ], +) +def test_get_private_connection(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vmwareengine_resources.PrivateConnection( + name="name_value", + description="description_value", + state=vmwareengine_resources.PrivateConnection.State.CREATING, + vmware_engine_network="vmware_engine_network_value", + vmware_engine_network_canonical="vmware_engine_network_canonical_value", + type_=vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS, + peering_id="peering_id_value", + routing_mode=vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL, + uid="uid_value", + service_network="service_network_value", + peering_state=vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE, + ) + response = client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetPrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.PrivateConnection) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vmwareengine_resources.PrivateConnection.State.CREATING + assert response.vmware_engine_network == "vmware_engine_network_value" + assert ( + response.vmware_engine_network_canonical + == "vmware_engine_network_canonical_value" + ) + assert ( + response.type_ + == vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS + ) + assert response.peering_id == "peering_id_value" + assert ( + response.routing_mode + == vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL + ) + assert response.uid == "uid_value" + assert response.service_network == "service_network_value" + assert ( + response.peering_state + == vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE + ) + + +def test_get_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + client.get_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetPrivateConnectionRequest() + + +@pytest.mark.asyncio +async def test_get_private_connection_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.GetPrivateConnectionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.PrivateConnection( + name="name_value", + description="description_value", + state=vmwareengine_resources.PrivateConnection.State.CREATING, + vmware_engine_network="vmware_engine_network_value", + vmware_engine_network_canonical="vmware_engine_network_canonical_value", + type_=vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS, + peering_id="peering_id_value", + routing_mode=vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL, + uid="uid_value", + service_network="service_network_value", + peering_state=vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE, + ) + ) + response = await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetPrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.PrivateConnection) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vmwareengine_resources.PrivateConnection.State.CREATING + assert response.vmware_engine_network == "vmware_engine_network_value" + assert ( + response.vmware_engine_network_canonical + == "vmware_engine_network_canonical_value" + ) + assert ( + response.type_ + == vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS + ) + assert response.peering_id == "peering_id_value" + assert ( + response.routing_mode + == vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL + ) + assert response.uid == "uid_value" + assert response.service_network == "service_network_value" + assert ( + response.peering_state + == vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE + ) + + +@pytest.mark.asyncio +async def test_get_private_connection_async_from_dict(): + await test_get_private_connection_async(request_type=dict) + + +def test_get_private_connection_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetPrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + call.return_value = vmwareengine_resources.PrivateConnection() + client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_private_connection_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetPrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.PrivateConnection() + ) + await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_private_connection_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.PrivateConnection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_private_connection_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_connection( + vmwareengine.GetPrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.PrivateConnection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.PrivateConnection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_private_connection( + vmwareengine.GetPrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListPrivateConnectionsRequest, + dict, + ], +) +def test_list_private_connections(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_private_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + client.list_private_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPrivateConnectionsRequest() + + +@pytest.mark.asyncio +async def test_list_private_connections_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListPrivateConnectionsRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_private_connections_async_from_dict(): + await test_list_private_connections_async(request_type=dict) + + +def test_list_private_connections_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListPrivateConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + call.return_value = vmwareengine.ListPrivateConnectionsResponse() + client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_private_connections_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListPrivateConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPrivateConnectionsResponse() + ) + await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_private_connections_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPrivateConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_private_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_private_connections_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_private_connections( + vmwareengine.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPrivateConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPrivateConnectionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_private_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_private_connections( + vmwareengine.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_private_connections_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_private_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.PrivateConnection) for i in results + ) + + +def test_list_private_connections_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_private_connections(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_private_connections( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.PrivateConnection) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_private_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdatePrivateConnectionRequest, + dict, + ], +) +def test_update_private_connection(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdatePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + client.update_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdatePrivateConnectionRequest() + + +@pytest.mark.asyncio +async def test_update_private_connection_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.UpdatePrivateConnectionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.UpdatePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_private_connection_async_from_dict(): + await test_update_private_connection_async(request_type=dict) + + +def test_update_private_connection_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.UpdatePrivateConnectionRequest() + + request.private_connection.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "private_connection.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_private_connection_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.UpdatePrivateConnectionRequest() + + request.private_connection.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "private_connection.name=name_value", + ) in kw["metadata"] + + +def test_update_private_connection_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_private_connection( + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].private_connection + mock_val = vmwareengine_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_private_connection_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_private_connection( + vmwareengine.UpdatePrivateConnectionRequest(), + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_private_connection_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_private_connection( + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].private_connection + mock_val = vmwareengine_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_private_connection_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_private_connection( + vmwareengine.UpdatePrivateConnectionRequest(), + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeletePrivateConnectionRequest, + dict, + ], +) +def test_delete_private_connection(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + client.delete_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeletePrivateConnectionRequest() + + +@pytest.mark.asyncio +async def test_delete_private_connection_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.DeletePrivateConnectionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_private_connection_async_from_dict(): + await test_delete_private_connection_async(request_type=dict) + + +def test_delete_private_connection_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeletePrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_private_connection_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.DeletePrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_private_connection_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_private_connection_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_private_connection( + vmwareengine.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_private_connection( + vmwareengine.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + dict, + ], +) +def test_list_private_connection_peering_routes(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_private_connection_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionPeeringRoutesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_private_connection_peering_routes_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + client.list_private_connection_peering_routes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.ListPrivateConnectionPeeringRoutesRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_private_connection_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionPeeringRoutesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_async_from_dict(): + await test_list_private_connection_peering_routes_async(request_type=dict) + + +def test_list_private_connection_peering_routes_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + client.list_private_connection_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + ) + await client.list_private_connection_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_private_connection_peering_routes_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_private_connection_peering_routes( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_private_connection_peering_routes_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_connection_peering_routes( + vmwareengine.ListPrivateConnectionPeeringRoutesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_private_connection_peering_routes( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_private_connection_peering_routes( + vmwareengine.ListPrivateConnectionPeeringRoutesRequest(), + parent="parent_value", + ) + + +def test_list_private_connection_peering_routes_pager(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_private_connection_peering_routes(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.PeeringRoute) for i in results) + + +def test_list_private_connection_peering_routes_pages(transport_name: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + pages = list(client.list_private_connection_peering_routes(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_async_pager(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_private_connection_peering_routes( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vmwareengine_resources.PeeringRoute) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_private_connection_peering_routes_async_pages(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connection_peering_routes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_private_connection_peering_routes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GrantDnsBindPermissionRequest, + dict, + ], +) +def test_grant_dns_bind_permission(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.grant_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GrantDnsBindPermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_grant_dns_bind_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + client.grant_dns_bind_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GrantDnsBindPermissionRequest() + + +@pytest.mark.asyncio +async def test_grant_dns_bind_permission_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.GrantDnsBindPermissionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.grant_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GrantDnsBindPermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_grant_dns_bind_permission_async_from_dict(): + await test_grant_dns_bind_permission_async(request_type=dict) + + +def test_grant_dns_bind_permission_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GrantDnsBindPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.grant_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_grant_dns_bind_permission_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vmwareengine.GrantDnsBindPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.grant_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_grant_dns_bind_permission_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.grant_dns_bind_permission( + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].principal + mock_val = vmwareengine_resources.Principal(user="user_value") + assert arg == mock_val + + +def test_grant_dns_bind_permission_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.grant_dns_bind_permission( + vmwareengine.GrantDnsBindPermissionRequest(), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + +@pytest.mark.asyncio +async def test_grant_dns_bind_permission_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.grant_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.grant_dns_bind_permission( + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].principal + mock_val = vmwareengine_resources.Principal(user="user_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_grant_dns_bind_permission_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.grant_dns_bind_permission( + vmwareengine.GrantDnsBindPermissionRequest(), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetDnsBindPermissionRequest, + dict, + ], +) +def test_get_dns_bind_permission(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.DnsBindPermission( + name="name_value", + ) + response = client.get_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetDnsBindPermissionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.DnsBindPermission) + assert response.name == "name_value" + + +def test_get_dns_bind_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + client.get_dns_bind_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetDnsBindPermissionRequest() + + +@pytest.mark.asyncio +async def test_get_dns_bind_permission_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.GetDnsBindPermissionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.DnsBindPermission( + name="name_value", + ) + ) + response = await client.get_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.GetDnsBindPermissionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.DnsBindPermission) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_dns_bind_permission_async_from_dict(): + await test_get_dns_bind_permission_async(request_type=dict) + + +def test_get_dns_bind_permission_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetDnsBindPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + call.return_value = vmwareengine_resources.DnsBindPermission() + client.get_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dns_bind_permission_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.GetDnsBindPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.DnsBindPermission() + ) + await client.get_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_dns_bind_permission_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vmwareengine_resources.DnsBindPermission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dns_bind_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_dns_bind_permission_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dns_bind_permission( + vmwareengine.GetDnsBindPermissionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dns_bind_permission_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vmwareengine_resources.DnsBindPermission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vmwareengine_resources.DnsBindPermission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dns_bind_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_dns_bind_permission_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dns_bind_permission( + vmwareengine.GetDnsBindPermissionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.RevokeDnsBindPermissionRequest, + dict, + ], +) +def test_revoke_dns_bind_permission(request_type, transport: str = "grpc"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.revoke_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.RevokeDnsBindPermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_revoke_dns_bind_permission_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + client.revoke_dns_bind_permission() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.RevokeDnsBindPermissionRequest() + + +@pytest.mark.asyncio +async def test_revoke_dns_bind_permission_async( + transport: str = "grpc_asyncio", + request_type=vmwareengine.RevokeDnsBindPermissionRequest, +): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.revoke_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vmwareengine.RevokeDnsBindPermissionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_revoke_dns_bind_permission_async_from_dict(): + await test_revoke_dns_bind_permission_async(request_type=dict) + + +def test_revoke_dns_bind_permission_field_headers(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.RevokeDnsBindPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.revoke_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_revoke_dns_bind_permission_field_headers_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vmwareengine.RevokeDnsBindPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.revoke_dns_bind_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_revoke_dns_bind_permission_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.revoke_dns_bind_permission( + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].principal + mock_val = vmwareengine_resources.Principal(user="user_value") + assert arg == mock_val + + +def test_revoke_dns_bind_permission_flattened_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.revoke_dns_bind_permission( + vmwareengine.RevokeDnsBindPermissionRequest(), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + +@pytest.mark.asyncio +async def test_revoke_dns_bind_permission_flattened_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_dns_bind_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.revoke_dns_bind_permission( + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].principal + mock_val = vmwareengine_resources.Principal(user="user_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_revoke_dns_bind_permission_flattened_error_async(): + client = VmwareEngineAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.revoke_dns_bind_permission( + vmwareengine.RevokeDnsBindPermissionRequest(), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListPrivateCloudsRequest, + dict, + ], +) +def test_list_private_clouds_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListPrivateCloudsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListPrivateCloudsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_private_clouds(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPrivateCloudsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_private_clouds_rest_required_fields( + request_type=vmwareengine.ListPrivateCloudsRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_private_clouds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_private_clouds._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListPrivateCloudsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListPrivateCloudsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_private_clouds(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_private_clouds_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_private_clouds._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_private_clouds_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_private_clouds" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_private_clouds" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.ListPrivateCloudsRequest.pb( + vmwareengine.ListPrivateCloudsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListPrivateCloudsResponse.to_json( + vmwareengine.ListPrivateCloudsResponse() + ) + + request = vmwareengine.ListPrivateCloudsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListPrivateCloudsResponse() + + client.list_private_clouds( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_private_clouds_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListPrivateCloudsRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_private_clouds(request) + + +def test_list_private_clouds_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListPrivateCloudsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListPrivateCloudsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_private_clouds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/privateClouds" + % client.transport._host, + args[1], + ) + + +def test_list_private_clouds_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_clouds( + vmwareengine.ListPrivateCloudsRequest(), + parent="parent_value", + ) + + +def test_list_private_clouds_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListPrivateCloudsResponse( + private_clouds=[ + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + ], + next_page_token="abc", + ), + vmwareengine.ListPrivateCloudsResponse( + private_clouds=[], + next_page_token="def", + ), + vmwareengine.ListPrivateCloudsResponse( + private_clouds=[ + vmwareengine_resources.PrivateCloud(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPrivateCloudsResponse( + private_clouds=[ + vmwareengine_resources.PrivateCloud(), + vmwareengine_resources.PrivateCloud(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListPrivateCloudsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_private_clouds(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.PrivateCloud) for i in results) + + pages = list(client.list_private_clouds(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetPrivateCloudRequest, + dict, + ], +) +def test_get_private_cloud_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": 
"projects/sample1/locations/sample2/privateClouds/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.PrivateCloud( + name="name_value", + state=vmwareengine_resources.PrivateCloud.State.ACTIVE, + description="description_value", + uid="uid_value", + type_=vmwareengine_resources.PrivateCloud.Type.TIME_LIMITED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.PrivateCloud.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_private_cloud(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.PrivateCloud) + assert response.name == "name_value" + assert response.state == vmwareengine_resources.PrivateCloud.State.ACTIVE + assert response.description == "description_value" + assert response.uid == "uid_value" + assert response.type_ == vmwareengine_resources.PrivateCloud.Type.TIME_LIMITED + + +def test_get_private_cloud_rest_required_fields( + request_type=vmwareengine.GetPrivateCloudRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.PrivateCloud() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.PrivateCloud.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_private_cloud(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_private_cloud_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_private_cloud._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_private_cloud_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.VmwareEngineRestInterceptor, "post_get_private_cloud" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_private_cloud" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetPrivateCloudRequest.pb( + vmwareengine.GetPrivateCloudRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.PrivateCloud.to_json( + vmwareengine_resources.PrivateCloud() + ) + + request = vmwareengine.GetPrivateCloudRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.PrivateCloud() + + client.get_private_cloud( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_private_cloud_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetPrivateCloudRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_private_cloud(request) + + +def test_get_private_cloud_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.PrivateCloud() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.PrivateCloud.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_private_cloud(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*}" + % client.transport._host, + args[1], + ) + + +def test_get_private_cloud_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_cloud( + vmwareengine.GetPrivateCloudRequest(), + name="name_value", + ) + + +def test_get_private_cloud_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreatePrivateCloudRequest, + dict, + ], +) +def test_create_private_cloud_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["private_cloud"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "state": 1, + "network_config": { + "management_cidr": "management_cidr_value", + "vmware_engine_network": "vmware_engine_network_value", + "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", + "management_ip_address_layout_version": 3836, + "dns_server_ip": "dns_server_ip_value", + }, + "management_cluster": { + "cluster_id": "cluster_id_value", + "node_type_configs": {}, + "stretched_cluster_config": { + "preferred_location": "preferred_location_value", + "secondary_location": "secondary_location_value", + }, + }, + "description": "description_value", + "hcx": { + "internal_ip": "internal_ip_value", + "version": "version_value", + 
"state": 1, + "fqdn": "fqdn_value", + }, + "nsx": { + "internal_ip": "internal_ip_value", + "version": "version_value", + "state": 1, + "fqdn": "fqdn_value", + }, + "vcenter": { + "internal_ip": "internal_ip_value", + "version": "version_value", + "state": 1, + "fqdn": "fqdn_value", + }, + "uid": "uid_value", + "type_": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreatePrivateCloudRequest.meta.fields["private_cloud"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["private_cloud"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + 
# For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["private_cloud"][field])): + del request_init["private_cloud"][field][i][subfield] + else: + del request_init["private_cloud"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_private_cloud(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_private_cloud_rest_required_fields( + request_type=vmwareengine.CreatePrivateCloudRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["private_cloud_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "privateCloudId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "privateCloudId" in jsonified_request + assert jsonified_request["privateCloudId"] == request_init["private_cloud_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["privateCloudId"] = "private_cloud_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_private_cloud._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "private_cloud_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "privateCloudId" in jsonified_request + assert jsonified_request["privateCloudId"] == "private_cloud_id_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_private_cloud(request) + + expected_params = [ + ( + "privateCloudId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_private_cloud_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_private_cloud._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "privateCloudId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "privateCloudId", + "privateCloud", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_private_cloud_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_private_cloud" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_create_private_cloud" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = vmwareengine.CreatePrivateCloudRequest.pb( + vmwareengine.CreatePrivateCloudRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.CreatePrivateCloudRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_private_cloud( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_private_cloud_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreatePrivateCloudRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_private_cloud(request) + + +def test_create_private_cloud_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), + private_cloud_id="private_cloud_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_private_cloud(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/privateClouds" + % client.transport._host, + args[1], + ) + + +def test_create_private_cloud_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_private_cloud( + vmwareengine.CreatePrivateCloudRequest(), + parent="parent_value", + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), + private_cloud_id="private_cloud_id_value", + ) + + +def test_create_private_cloud_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdatePrivateCloudRequest, + dict, + ], +) +def test_update_private_cloud_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "private_cloud": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3" + } + } + request_init["private_cloud"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "expire_time": {}, + "state": 1, + "network_config": { + "management_cidr": "management_cidr_value", + "vmware_engine_network": "vmware_engine_network_value", + "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", + "management_ip_address_layout_version": 3836, + "dns_server_ip": "dns_server_ip_value", + }, + "management_cluster": { + "cluster_id": "cluster_id_value", + "node_type_configs": {}, + "stretched_cluster_config": { + "preferred_location": "preferred_location_value", + "secondary_location": "secondary_location_value", + }, + }, + "description": "description_value", + "hcx": { + "internal_ip": "internal_ip_value", + "version": "version_value", + "state": 1, + "fqdn": "fqdn_value", + }, + "nsx": { + "internal_ip": "internal_ip_value", + "version": "version_value", + "state": 1, + "fqdn": "fqdn_value", + }, + "vcenter": { + "internal_ip": "internal_ip_value", + "version": "version_value", + "state": 1, + 
"fqdn": "fqdn_value", + }, + "uid": "uid_value", + "type_": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdatePrivateCloudRequest.meta.fields["private_cloud"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["private_cloud"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { 
+ "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["private_cloud"][field])): + del request_init["private_cloud"][field][i][subfield] + else: + del request_init["private_cloud"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_private_cloud(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_private_cloud_rest_required_fields( + request_type=vmwareengine.UpdatePrivateCloudRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_private_cloud._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_private_cloud(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_private_cloud_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_private_cloud._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "privateCloud", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_private_cloud_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_private_cloud" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_update_private_cloud" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.UpdatePrivateCloudRequest.pb( + 
vmwareengine.UpdatePrivateCloudRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UpdatePrivateCloudRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_private_cloud( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_private_cloud_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdatePrivateCloudRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "private_cloud": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_private_cloud(request) + + +def test_update_private_cloud_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "private_cloud": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_private_cloud(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{private_cloud.name=projects/*/locations/*/privateClouds/*}" + % client.transport._host, + args[1], + ) + + +def test_update_private_cloud_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_private_cloud( + vmwareengine.UpdatePrivateCloudRequest(), + private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_private_cloud_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeletePrivateCloudRequest, + dict, + ], +) +def test_delete_private_cloud_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_private_cloud(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_private_cloud_rest_required_fields( + request_type=vmwareengine.DeletePrivateCloudRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_private_cloud._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "delay_hours", + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_private_cloud(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_private_cloud_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_private_cloud._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "delayHours", + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_private_cloud_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_delete_private_cloud" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_delete_private_cloud" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.DeletePrivateCloudRequest.pb( + vmwareengine.DeletePrivateCloudRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.DeletePrivateCloudRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_private_cloud( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_private_cloud_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeletePrivateCloudRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_private_cloud(request) + + +def test_delete_private_cloud_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_private_cloud(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_private_cloud_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_private_cloud( + vmwareengine.DeletePrivateCloudRequest(), + name="name_value", + ) + + +def test_delete_private_cloud_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UndeletePrivateCloudRequest, + dict, + ], +) +def test_undelete_private_cloud_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.undelete_private_cloud(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_undelete_private_cloud_rest_required_fields( + request_type=vmwareengine.UndeletePrivateCloudRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undelete_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undelete_private_cloud._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.undelete_private_cloud(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_undelete_private_cloud_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.undelete_private_cloud._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_undelete_private_cloud_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_undelete_private_cloud" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_undelete_private_cloud" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = vmwareengine.UndeletePrivateCloudRequest.pb( + vmwareengine.UndeletePrivateCloudRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UndeletePrivateCloudRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.undelete_private_cloud( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_undelete_private_cloud_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UndeletePrivateCloudRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.undelete_private_cloud(request) + + +def test_undelete_private_cloud_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.undelete_private_cloud(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*}:undelete" + % client.transport._host, + args[1], + ) + + +def test_undelete_private_cloud_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.undelete_private_cloud( + vmwareengine.UndeletePrivateCloudRequest(), + name="name_value", + ) + + +def test_undelete_private_cloud_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListClustersRequest, + dict, + ], +) +def test_list_clusters_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListClustersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_clusters_rest_required_fields( + request_type=vmwareengine.ListClustersRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListClustersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.ListClustersRequest.pb( + vmwareengine.ListClustersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListClustersResponse.to_json( + vmwareengine.ListClustersResponse() + ) + + request = vmwareengine.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListClustersRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + vmwareengine.ListClustersRequest(), + parent="parent_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListClustersResponse( + clusters=[ + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + ], + next_page_token="abc", + ), + vmwareengine.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + vmwareengine.ListClustersResponse( + clusters=[ + vmwareengine_resources.Cluster(), + ], + next_page_token="ghi", + ), + vmwareengine.ListClustersResponse( + clusters=[ + vmwareengine_resources.Cluster(), + vmwareengine_resources.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(vmwareengine.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Cluster( + name="name_value", + state=vmwareengine_resources.Cluster.State.ACTIVE, + management=True, + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.Cluster) + assert response.name == "name_value" + assert response.state == vmwareengine_resources.Cluster.State.ACTIVE + assert response.management is True + assert response.uid == "uid_value" + + +def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRequest): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cluster_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_get_cluster" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
vmwareengine.GetClusterRequest.pb(vmwareengine.GetClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.Cluster.to_json( + vmwareengine_resources.Cluster() + ) + + request = vmwareengine.GetClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.Cluster() + + client.get_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetClusterRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cluster(request) + + +def test_get_cluster_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine_resources.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cluster( + vmwareengine.GetClusterRequest(), + name="name_value", + ) + + +def test_get_cluster_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request_init["cluster"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "management": True, + "uid": "uid_value", + "node_type_configs": {}, + "stretched_cluster_config": { + "preferred_location": "preferred_location_value", + "secondary_location": "secondary_location_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del 
request_init["cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_cluster_rest_required_fields( + request_type=vmwareengine.CreateClusterRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "clusterId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == request_init["cluster_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["clusterId"] = "cluster_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clusterId" in jsonified_request + assert jsonified_request["clusterId"] == "cluster_id_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cluster(request) + + expected_params = [ + ( + "clusterId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cluster_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "clusterId", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cluster_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_cluster" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_create_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.CreateClusterRequest.pb( + 
vmwareengine.CreateClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.CreateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateClusterRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cluster(request) + + +def test_create_cluster_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + cluster=vmwareengine_resources.Cluster(name="name_value"), + cluster_id="cluster_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters" + % client.transport._host, + args[1], + ) + + +def test_create_cluster_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_cluster( + vmwareengine.CreateClusterRequest(), + parent="parent_value", + cluster=vmwareengine_resources.Cluster(name="name_value"), + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + } + request_init["cluster"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "management": True, + "uid": "uid_value", + "node_type_configs": {}, + "stretched_cluster_config": { + "preferred_location": "preferred_location_value", + "secondary_location": "secondary_location_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateClusterRequest.meta.fields["cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cluster"][field])): + del request_init["cluster"][field][i][subfield] + else: + del 
request_init["cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_cluster_rest_required_fields( + request_type=vmwareengine.UpdateClusterRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cluster_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", 
+ "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.UpdateClusterRequest.pb( + vmwareengine.UpdateClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateClusterRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "cluster": { + "name": 
"projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cluster(request) + + +def test_update_cluster_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "cluster": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + cluster=vmwareengine_resources.Cluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{cluster.name=projects/*/locations/*/privateClouds/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cluster( + vmwareengine.UpdateClusterRequest(), + cluster=vmwareengine_resources.Cluster(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cluster_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_cluster_rest_required_fields( + request_type=vmwareengine.DeleteClusterRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
vmwareengine.DeleteClusterRequest.pb( + vmwareengine.DeleteClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteClusterRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_cluster( + vmwareengine.DeleteClusterRequest(), + name="name_value", + ) + + +def test_delete_cluster_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListNodesRequest, + dict, + ], +) +def test_list_nodes_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListNodesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_nodes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNodesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_nodes_rest_required_fields(request_type=vmwareengine.ListNodesRequest): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListNodesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_nodes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_nodes_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_nodes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_nodes_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_nodes" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_nodes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = vmwareengine.ListNodesRequest.pb(vmwareengine.ListNodesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListNodesResponse.to_json( + vmwareengine.ListNodesResponse() + ) + + request = vmwareengine.ListNodesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListNodesResponse() + + client.list_nodes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_nodes_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListNodesRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_nodes(request) + + +def test_list_nodes_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine.ListNodesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*/clusters/*}/nodes" + % client.transport._host, + args[1], + ) + + +def test_list_nodes_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_nodes( + vmwareengine.ListNodesRequest(), + parent="parent_value", + ) + + +def test_list_nodes_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + ], + next_page_token="abc", + ), + vmwareengine.ListNodesResponse( + nodes=[], + next_page_token="def", + ), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNodesResponse( + nodes=[ + vmwareengine_resources.Node(), + vmwareengine_resources.Node(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(vmwareengine.ListNodesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + } + + pager = client.list_nodes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.Node) for i in results) + + pages = list(client.list_nodes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetNodeRequest, + dict, + ], +) +def test_get_node_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4/nodes/sample5" + } + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Node( + name="name_value", + fqdn="fqdn_value", + internal_ip="internal_ip_value", + node_type_id="node_type_id_value", + version="version_value", + custom_core_count=1835, + state=vmwareengine_resources.Node.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Node.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_node(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.Node) + assert response.name == "name_value" + assert response.fqdn == "fqdn_value" + assert response.internal_ip == "internal_ip_value" + assert response.node_type_id == "node_type_id_value" + assert response.version == "version_value" + assert response.custom_core_count == 1835 + assert response.state == vmwareengine_resources.Node.State.ACTIVE + + +def test_get_node_rest_required_fields(request_type=vmwareengine.GetNodeRequest): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_node._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_node._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Node() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.Node.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_node(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_node_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_node._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_node_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_get_node" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_node" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetNodeRequest.pb(vmwareengine.GetNodeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.Node.to_json( + vmwareengine_resources.Node() + ) + + request = vmwareengine.GetNodeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.Node() + + client.get_node( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_node_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetNodeRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4/nodes/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_node(request) + + +def test_get_node_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine_resources.Node() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4/nodes/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Node.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_node(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*/nodes/*}" + % client.transport._host, + args[1], + ) + + +def test_get_node_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_node( + vmwareengine.GetNodeRequest(), + name="name_value", + ) + + +def test_get_node_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListExternalAddressesRequest, + dict, + ], +) +def test_list_external_addresses_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListExternalAddressesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListExternalAddressesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_external_addresses(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListExternalAddressesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_external_addresses_rest_required_fields( + request_type=vmwareengine.ListExternalAddressesRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_addresses._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_addresses._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListExternalAddressesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListExternalAddressesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_external_addresses(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_external_addresses_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_external_addresses._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_external_addresses_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_external_addresses" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_external_addresses" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.ListExternalAddressesRequest.pb( + vmwareengine.ListExternalAddressesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListExternalAddressesResponse.to_json( + vmwareengine.ListExternalAddressesResponse() + ) + + request = vmwareengine.ListExternalAddressesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListExternalAddressesResponse() + + client.list_external_addresses( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_external_addresses_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListExternalAddressesRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_external_addresses(request) + + +def test_list_external_addresses_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListExternalAddressesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListExternalAddressesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_external_addresses(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/externalAddresses" + % client.transport._host, + args[1], + ) + + +def test_list_external_addresses_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_external_addresses( + vmwareengine.ListExternalAddressesRequest(), + parent="parent_value", + ) + + +def test_list_external_addresses_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="abc", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[], + next_page_token="def", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="ghi", + ), + vmwareengine.ListExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListExternalAddressesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + pager = client.list_external_addresses(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.ExternalAddress) for i in results + ) + + pages = list(client.list_external_addresses(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.FetchNetworkPolicyExternalAddressesRequest, + dict, + ], +) +def test_fetch_network_policy_external_addresses_rest(request_type): + client = VmwareEngineClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "network_policy": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_network_policy_external_addresses(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchNetworkPolicyExternalAddressesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_network_policy_external_addresses_rest_required_fields( + request_type=vmwareengine.FetchNetworkPolicyExternalAddressesRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["network_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_network_policy_external_addresses._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkPolicy"] = "network_policy_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_network_policy_external_addresses._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkPolicy" in jsonified_request + assert jsonified_request["networkPolicy"] == "network_policy_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_network_policy_external_addresses(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_network_policy_external_addresses_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.fetch_network_policy_external_addresses._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("networkPolicy",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_network_policy_external_addresses_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = 
VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, + "post_fetch_network_policy_external_addresses", + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, + "pre_fetch_network_policy_external_addresses", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.FetchNetworkPolicyExternalAddressesRequest.pb( + vmwareengine.FetchNetworkPolicyExternalAddressesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + vmwareengine.FetchNetworkPolicyExternalAddressesResponse.to_json( + vmwareengine.FetchNetworkPolicyExternalAddressesResponse() + ) + ) + + request = vmwareengine.FetchNetworkPolicyExternalAddressesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() + + client.fetch_network_policy_external_addresses( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_network_policy_external_addresses_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.FetchNetworkPolicyExternalAddressesRequest, +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "network_policy": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_network_policy_external_addresses(request) + + +def test_fetch_network_policy_external_addresses_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "network_policy": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + network_policy="network_policy_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.FetchNetworkPolicyExternalAddressesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_network_policy_external_addresses(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{network_policy=projects/*/locations/*/networkPolicies/*}:fetchExternalAddresses" + % client.transport._host, + args[1], + ) + + +def test_fetch_network_policy_external_addresses_rest_flattened_error( + transport: str = "rest", +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_network_policy_external_addresses( + vmwareengine.FetchNetworkPolicyExternalAddressesRequest(), + network_policy="network_policy_value", + ) + + +def test_fetch_network_policy_external_addresses_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="abc", + ), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[], + next_page_token="def", + ), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + ], + next_page_token="ghi", + ), + vmwareengine.FetchNetworkPolicyExternalAddressesResponse( + external_addresses=[ + vmwareengine_resources.ExternalAddress(), + vmwareengine_resources.ExternalAddress(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.FetchNetworkPolicyExternalAddressesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "network_policy": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + + pager = client.fetch_network_policy_external_addresses(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.ExternalAddress) for i in results + ) + + pages = list( + client.fetch_network_policy_external_addresses(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetExternalAddressRequest, + dict, + ], +) +def 
test_get_external_address_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.ExternalAddress( + name="name_value", + internal_ip="internal_ip_value", + external_ip="external_ip_value", + state=vmwareengine_resources.ExternalAddress.State.ACTIVE, + uid="uid_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.ExternalAddress.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_address(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.ExternalAddress) + assert response.name == "name_value" + assert response.internal_ip == "internal_ip_value" + assert response.external_ip == "external_ip_value" + assert response.state == vmwareengine_resources.ExternalAddress.State.ACTIVE + assert response.uid == "uid_value" + assert response.description == "description_value" + + +def test_get_external_address_rest_required_fields( + request_type=vmwareengine.GetExternalAddressRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.ExternalAddress() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.ExternalAddress.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_external_address(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_external_address_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_external_address._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_external_address_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_get_external_address" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_external_address" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetExternalAddressRequest.pb( + vmwareengine.GetExternalAddressRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.ExternalAddress.to_json( + vmwareengine_resources.ExternalAddress() + ) + + request = vmwareengine.GetExternalAddressRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.ExternalAddress() + + client.get_external_address( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_external_address_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetExternalAddressRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_external_address(request) + + +def test_get_external_address_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.ExternalAddress() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.ExternalAddress.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_external_address(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/externalAddresses/*}" + % client.transport._host, + args[1], + ) + + +def test_get_external_address_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_external_address( + vmwareengine.GetExternalAddressRequest(), + name="name_value", + ) + + +def test_get_external_address_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateExternalAddressRequest, + dict, + ], +) +def test_create_external_address_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request_init["external_address"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "internal_ip": "internal_ip_value", + "external_ip": "external_ip_value", + "state": 1, + "uid": "uid_value", + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateExternalAddressRequest.meta.fields[ + "external_address" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_address"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_address"][field])): + del request_init["external_address"][field][i][subfield] + else: + del request_init["external_address"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_address(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_external_address_rest_required_fields( + request_type=vmwareengine.CreateExternalAddressRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["external_address_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "externalAddressId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "externalAddressId" in jsonified_request + assert jsonified_request["externalAddressId"] == request_init["external_address_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["externalAddressId"] = "external_address_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_address._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "external_address_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "externalAddressId" in jsonified_request + assert jsonified_request["externalAddressId"] == "external_address_id_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_external_address(request) + + expected_params = [ + ( + "externalAddressId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_external_address_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_external_address._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "externalAddressId", + "requestId", + ) + ) + & set( + ( + "parent", + "externalAddress", + "externalAddressId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_external_address_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_external_address" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_create_external_address" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = vmwareengine.CreateExternalAddressRequest.pb( + vmwareengine.CreateExternalAddressRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.CreateExternalAddressRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_external_address( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_external_address_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateExternalAddressRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_external_address(request) + + +def test_create_external_address_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + external_address_id="external_address_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_external_address(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/externalAddresses" + % client.transport._host, + args[1], + ) + + +def test_create_external_address_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_external_address( + vmwareengine.CreateExternalAddressRequest(), + parent="parent_value", + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + external_address_id="external_address_id_value", + ) + + +def test_create_external_address_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateExternalAddressRequest, + dict, + ], +) +def test_update_external_address_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "external_address": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + } + request_init["external_address"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "internal_ip": "internal_ip_value", + "external_ip": "external_ip_value", + "state": 1, + "uid": "uid_value", + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateExternalAddressRequest.meta.fields[ + "external_address" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_address"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_address"][field])): + del request_init["external_address"][field][i][subfield] + 
else: + del request_init["external_address"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_address(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_external_address_rest_required_fields( + request_type=vmwareengine.UpdateExternalAddressRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_address._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_external_address(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_external_address_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_external_address._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + 
"externalAddress", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_external_address_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_external_address" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_update_external_address" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.UpdateExternalAddressRequest.pb( + vmwareengine.UpdateExternalAddressRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UpdateExternalAddressRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_external_address( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_external_address_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateExternalAddressRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a 
request that will satisfy transcoding + request_init = { + "external_address": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_external_address(request) + + +def test_update_external_address_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "external_address": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_external_address(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{external_address.name=projects/*/locations/*/privateClouds/*/externalAddresses/*}" + % client.transport._host, + args[1], + ) + + +def test_update_external_address_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_external_address( + vmwareengine.UpdateExternalAddressRequest(), + external_address=vmwareengine_resources.ExternalAddress(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_external_address_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteExternalAddressRequest, + dict, + ], +) +def test_delete_external_address_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_external_address(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_external_address_rest_required_fields( + request_type=vmwareengine.DeleteExternalAddressRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_address._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_address._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_external_address(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_external_address_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_external_address._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_external_address_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_delete_external_address" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_delete_external_address" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.DeleteExternalAddressRequest.pb( + vmwareengine.DeleteExternalAddressRequest() + ) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.DeleteExternalAddressRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_external_address( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_external_address_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteExternalAddressRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_external_address(request) + + +def test_delete_external_address_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/externalAddresses/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_external_address(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/externalAddresses/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_external_address_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_external_address( + vmwareengine.DeleteExternalAddressRequest(), + name="name_value", + ) + + +def test_delete_external_address_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListSubnetsRequest, + dict, + ], +) +def test_list_subnets_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListSubnetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListSubnetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_subnets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSubnetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_subnets_rest_required_fields( + request_type=vmwareengine.ListSubnetsRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_subnets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_subnets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListSubnetsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListSubnetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_subnets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_subnets_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_subnets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_subnets_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_subnets" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_subnets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.ListSubnetsRequest.pb( + vmwareengine.ListSubnetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListSubnetsResponse.to_json( + vmwareengine.ListSubnetsResponse() + ) + + request = vmwareengine.ListSubnetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListSubnetsResponse() + + client.list_subnets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_subnets_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListSubnetsRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_subnets(request) + + +def test_list_subnets_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListSubnetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListSubnetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_subnets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/subnets" + % client.transport._host, + args[1], + ) + + +def test_list_subnets_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_subnets( + vmwareengine.ListSubnetsRequest(), + parent="parent_value", + ) + + +def test_list_subnets_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + next_page_token="abc", + ), + vmwareengine.ListSubnetsResponse( + subnets=[], + next_page_token="def", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + ], + next_page_token="ghi", + ), + vmwareengine.ListSubnetsResponse( + subnets=[ + vmwareengine_resources.Subnet(), + vmwareengine_resources.Subnet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(vmwareengine.ListSubnetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") 
+ return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + pager = client.list_subnets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.Subnet) for i in results) + + pages = list(client.list_subnets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetSubnetRequest, + dict, + ], +) +def test_get_subnet_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Subnet( + name="name_value", + ip_cidr_range="ip_cidr_range_value", + gateway_ip="gateway_ip_value", + type_="type__value", + state=vmwareengine_resources.Subnet.State.ACTIVE, + vlan_id=733, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Subnet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_subnet(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.Subnet) + assert response.name == "name_value" + assert response.ip_cidr_range == "ip_cidr_range_value" + assert response.gateway_ip == "gateway_ip_value" + assert response.type_ == "type__value" + assert response.state == vmwareengine_resources.Subnet.State.ACTIVE + assert response.vlan_id == 733 + + +def test_get_subnet_rest_required_fields(request_type=vmwareengine.GetSubnetRequest): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_subnet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_subnet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Subnet() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.Subnet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_subnet(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_subnet_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_subnet._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_subnet_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.VmwareEngineRestInterceptor, "post_get_subnet" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_subnet" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetSubnetRequest.pb(vmwareengine.GetSubnetRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.Subnet.to_json( + vmwareengine_resources.Subnet() + ) + + request = vmwareengine.GetSubnetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.Subnet() + + client.get_subnet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_subnet_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetSubnetRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_subnet(request) + + +def test_get_subnet_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Subnet() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Subnet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_subnet(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/subnets/*}" + % client.transport._host, + args[1], + ) + + +def test_get_subnet_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_subnet( + vmwareengine.GetSubnetRequest(), + name="name_value", + ) + + +def test_get_subnet_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateSubnetRequest, + dict, + ], +) +def test_update_subnet_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "subnet": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + } + } + request_init["subnet"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4", + "ip_cidr_range": "ip_cidr_range_value", + "gateway_ip": "gateway_ip_value", + "type_": "type__value", + "state": 1, + "vlan_id": 733, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateSubnetRequest.meta.fields["subnet"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["subnet"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["subnet"][field])): + del 
request_init["subnet"][field][i][subfield] + else: + del request_init["subnet"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_subnet(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_subnet_rest_required_fields( + request_type=vmwareengine.UpdateSubnetRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_subnet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_subnet._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_subnet(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_subnet_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_subnet._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "subnet", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_update_subnet_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_subnet" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_update_subnet" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.UpdateSubnetRequest.pb( + vmwareengine.UpdateSubnetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UpdateSubnetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_subnet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_subnet_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateSubnetRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "subnet": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + } + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_subnet(request) + + +def test_update_subnet_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "subnet": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + subnet=vmwareengine_resources.Subnet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_subnet(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subnet.name=projects/*/locations/*/privateClouds/*/subnets/*}" + % client.transport._host, + args[1], + ) + + +def test_update_subnet_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_subnet( + vmwareengine.UpdateSubnetRequest(), + subnet=vmwareengine_resources.Subnet(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_subnet_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListExternalAccessRulesRequest, + dict, + ], +) +def test_list_external_access_rules_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine.ListExternalAccessRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListExternalAccessRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_external_access_rules(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExternalAccessRulesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_external_access_rules_rest_required_fields( + request_type=vmwareengine.ListExternalAccessRulesRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_access_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_access_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListExternalAccessRulesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListExternalAccessRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_external_access_rules(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_external_access_rules_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_external_access_rules._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_external_access_rules_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_external_access_rules" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_external_access_rules" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
vmwareengine.ListExternalAccessRulesRequest.pb( + vmwareengine.ListExternalAccessRulesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + vmwareengine.ListExternalAccessRulesResponse.to_json( + vmwareengine.ListExternalAccessRulesResponse() + ) + ) + + request = vmwareengine.ListExternalAccessRulesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListExternalAccessRulesResponse() + + client.list_external_access_rules( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_external_access_rules_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListExternalAccessRulesRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_external_access_rules(request) + + +def test_list_external_access_rules_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListExternalAccessRulesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListExternalAccessRulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_external_access_rules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/networkPolicies/*}/externalAccessRules" + % client.transport._host, + args[1], + ) + + +def test_list_external_access_rules_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_external_access_rules( + vmwareengine.ListExternalAccessRulesRequest(), + parent="parent_value", + ) + + +def test_list_external_access_rules_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + ], + next_page_token="abc", + ), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[], + next_page_token="def", + ), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + ], + next_page_token="ghi", + ), + vmwareengine.ListExternalAccessRulesResponse( + external_access_rules=[ + vmwareengine_resources.ExternalAccessRule(), + vmwareengine_resources.ExternalAccessRule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListExternalAccessRulesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + + pager = client.list_external_access_rules(request=sample_request) + + results = list(pager) + assert len(results) == 6 assert all( - isinstance(i, vmwareengine_resources.PeeringRoute) for i in responses + isinstance(i, vmwareengine_resources.ExternalAccessRule) for i in results + ) + + pages = list(client.list_external_access_rules(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetExternalAccessRuleRequest, + dict, + ], +) +def test_get_external_access_rule_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.ExternalAccessRule( + name="name_value", + description="description_value", + priority=898, + action=vmwareengine_resources.ExternalAccessRule.Action.ALLOW, + ip_protocol="ip_protocol_value", + source_ports=["source_ports_value"], + destination_ports=["destination_ports_value"], + state=vmwareengine_resources.ExternalAccessRule.State.ACTIVE, + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.ExternalAccessRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_access_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.ExternalAccessRule) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.priority == 898 + assert response.action == vmwareengine_resources.ExternalAccessRule.Action.ALLOW + assert response.ip_protocol == "ip_protocol_value" + assert response.source_ports == ["source_ports_value"] + assert response.destination_ports == ["destination_ports_value"] + assert response.state == vmwareengine_resources.ExternalAccessRule.State.ACTIVE + assert response.uid == "uid_value" + + +def test_get_external_access_rule_rest_required_fields( + request_type=vmwareengine.GetExternalAccessRuleRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_access_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_access_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine_resources.ExternalAccessRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.ExternalAccessRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_external_access_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_external_access_rule_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_external_access_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_external_access_rule_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_get_external_access_rule" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_external_access_rule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetExternalAccessRuleRequest.pb( + vmwareengine.GetExternalAccessRuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.ExternalAccessRule.to_json( + vmwareengine_resources.ExternalAccessRule() + ) + + request = vmwareengine.GetExternalAccessRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.ExternalAccessRule() + + client.get_external_access_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_external_access_rule_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetExternalAccessRuleRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_external_access_rule(request) + + +def test_get_external_access_rule_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.ExternalAccessRule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.ExternalAccessRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_external_access_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/networkPolicies/*/externalAccessRules/*}" + % client.transport._host, + args[1], + ) + + +def test_get_external_access_rule_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_external_access_rule( + vmwareengine.GetExternalAccessRuleRequest(), + name="name_value", + ) + + +def test_get_external_access_rule_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateExternalAccessRuleRequest, + dict, + ], +) +def test_create_external_access_rule_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + request_init["external_access_rule"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "priority": 898, + "action": 1, + "ip_protocol": "ip_protocol_value", + "source_ip_ranges": [ + { + "ip_address": "ip_address_value", + "ip_address_range": "ip_address_range_value", + "external_address": "external_address_value", + } + ], + "source_ports": ["source_ports_value1", "source_ports_value2"], + "destination_ip_ranges": {}, + "destination_ports": ["destination_ports_value1", "destination_ports_value2"], + "state": 1, + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateExternalAccessRuleRequest.meta.fields[ + "external_access_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "external_access_rule" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present 
in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_access_rule"][field])): + del request_init["external_access_rule"][field][i][subfield] + else: + del request_init["external_access_rule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_access_rule(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_external_access_rule_rest_required_fields( + request_type=vmwareengine.CreateExternalAccessRuleRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["external_access_rule_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "externalAccessRuleId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_access_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "externalAccessRuleId" in jsonified_request + assert ( + jsonified_request["externalAccessRuleId"] + == request_init["external_access_rule_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["externalAccessRuleId"] = "external_access_rule_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_access_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "external_access_rule_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "externalAccessRuleId" in jsonified_request + assert jsonified_request["externalAccessRuleId"] == "external_access_rule_id_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_external_access_rule(request) + + expected_params = [ + ( + "externalAccessRuleId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_external_access_rule_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_external_access_rule._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "externalAccessRuleId", + "requestId", + ) + ) + & set( + ( + "parent", + "externalAccessRule", + "externalAccessRuleId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_external_access_rule_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_external_access_rule" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_create_external_access_rule" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.CreateExternalAccessRuleRequest.pb( + vmwareengine.CreateExternalAccessRuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.CreateExternalAccessRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_external_access_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_external_access_rule_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateExternalAccessRuleRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_external_access_rule(request) + + +def test_create_external_access_rule_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/networkPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + external_access_rule_id="external_access_rule_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_external_access_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/networkPolicies/*}/externalAccessRules" + % client.transport._host, + args[1], + ) + + +def test_create_external_access_rule_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_external_access_rule( + vmwareengine.CreateExternalAccessRuleRequest(), + parent="parent_value", + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + external_access_rule_id="external_access_rule_id_value", + ) + + +def test_create_external_access_rule_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateExternalAccessRuleRequest, + dict, + ], +) +def test_update_external_access_rule_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "external_access_rule": { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + } + request_init["external_access_rule"] = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "priority": 898, + "action": 1, + "ip_protocol": "ip_protocol_value", + "source_ip_ranges": [ + { + "ip_address": "ip_address_value", + "ip_address_range": "ip_address_range_value", + "external_address": "external_address_value", + } + ], + "source_ports": ["source_ports_value1", "source_ports_value2"], + "destination_ip_ranges": {}, + "destination_ports": ["destination_ports_value1", "destination_ports_value2"], + "state": 1, + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateExternalAccessRuleRequest.meta.fields[ + "external_access_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "external_access_rule" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present 
in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_access_rule"][field])): + del request_init["external_access_rule"][field][i][subfield] + else: + del request_init["external_access_rule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_access_rule(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_external_access_rule_rest_required_fields( + request_type=vmwareengine.UpdateExternalAccessRuleRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_access_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_access_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_external_access_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_external_access_rule_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_external_access_rule._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "externalAccessRule", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_external_access_rule_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_external_access_rule" + ) as post, 
mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_update_external_access_rule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.UpdateExternalAccessRuleRequest.pb( + vmwareengine.UpdateExternalAccessRuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UpdateExternalAccessRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_external_access_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_external_access_rule_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateExternalAccessRuleRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "external_access_rule": { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_external_access_rule(request) + + +def test_update_external_access_rule_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "external_access_rule": { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_external_access_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{external_access_rule.name=projects/*/locations/*/networkPolicies/*/externalAccessRules/*}" + % client.transport._host, + args[1], + ) + + +def test_update_external_access_rule_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_external_access_rule( + vmwareengine.UpdateExternalAccessRuleRequest(), + external_access_rule=vmwareengine_resources.ExternalAccessRule( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_external_access_rule_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteExternalAccessRuleRequest, + dict, + ], +) +def test_delete_external_access_rule_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_external_access_rule(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_external_access_rule_rest_required_fields( + request_type=vmwareengine.DeleteExternalAccessRuleRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_access_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_access_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_external_access_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_external_access_rule_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_external_access_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_external_access_rule_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_delete_external_access_rule" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_delete_external_access_rule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.DeleteExternalAccessRuleRequest.pb( + vmwareengine.DeleteExternalAccessRuleRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.DeleteExternalAccessRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_external_access_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_external_access_rule_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteExternalAccessRuleRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_external_access_rule(request) + + +def test_delete_external_access_rule_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3/externalAccessRules/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_external_access_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/networkPolicies/*/externalAccessRules/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_external_access_rule_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_external_access_rule( + vmwareengine.DeleteExternalAccessRuleRequest(), + name="name_value", + ) + + +def test_delete_external_access_rule_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListLoggingServersRequest, + dict, + ], +) +def test_list_logging_servers_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListLoggingServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListLoggingServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_logging_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListLoggingServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_logging_servers_rest_required_fields( + request_type=vmwareengine.ListLoggingServersRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_logging_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_logging_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListLoggingServersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListLoggingServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_logging_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_logging_servers_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_logging_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_logging_servers_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_logging_servers" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_logging_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.ListLoggingServersRequest.pb( + vmwareengine.ListLoggingServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListLoggingServersResponse.to_json( + vmwareengine.ListLoggingServersResponse() + ) + + request = vmwareengine.ListLoggingServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListLoggingServersResponse() + + client.list_logging_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_logging_servers_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListLoggingServersRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_logging_servers(request) + + +def test_list_logging_servers_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListLoggingServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListLoggingServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_logging_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/loggingServers" + % client.transport._host, + args[1], + ) + + +def test_list_logging_servers_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_logging_servers( + vmwareengine.ListLoggingServersRequest(), + parent="parent_value", + ) + + +def test_list_logging_servers_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + next_page_token="abc", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[], + next_page_token="def", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + ], + next_page_token="ghi", + ), + vmwareengine.ListLoggingServersResponse( + logging_servers=[ + vmwareengine_resources.LoggingServer(), + vmwareengine_resources.LoggingServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListLoggingServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + pager = client.list_logging_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.LoggingServer) for i in results) + + pages = list(client.list_logging_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetLoggingServerRequest, + dict, + ], +) +def test_get_logging_server_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + 
request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.LoggingServer( + name="name_value", + hostname="hostname_value", + port=453, + protocol=vmwareengine_resources.LoggingServer.Protocol.UDP, + source_type=vmwareengine_resources.LoggingServer.SourceType.ESXI, + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.LoggingServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_logging_server(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.LoggingServer) + assert response.name == "name_value" + assert response.hostname == "hostname_value" + assert response.port == 453 + assert response.protocol == vmwareengine_resources.LoggingServer.Protocol.UDP + assert response.source_type == vmwareengine_resources.LoggingServer.SourceType.ESXI + assert response.uid == "uid_value" + + +def test_get_logging_server_rest_required_fields( + request_type=vmwareengine.GetLoggingServerRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_logging_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_logging_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.LoggingServer() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.LoggingServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_logging_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_logging_server_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_logging_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_logging_server_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( 
+ transports.VmwareEngineRestInterceptor, "post_get_logging_server" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_logging_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetLoggingServerRequest.pb( + vmwareengine.GetLoggingServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.LoggingServer.to_json( + vmwareengine_resources.LoggingServer() + ) + + request = vmwareengine.GetLoggingServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.LoggingServer() + + client.get_logging_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_logging_server_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetLoggingServerRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_logging_server(request) + + +def test_get_logging_server_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.LoggingServer() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.LoggingServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_logging_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/loggingServers/*}" + % client.transport._host, + args[1], + ) + + +def test_get_logging_server_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_logging_server( + vmwareengine.GetLoggingServerRequest(), + name="name_value", + ) + + +def test_get_logging_server_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateLoggingServerRequest, + dict, + ], +) +def test_create_logging_server_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request_init["logging_server"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "hostname": "hostname_value", + "port": 453, + "protocol": 1, + "source_type": 1, + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateLoggingServerRequest.meta.fields["logging_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["logging_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["logging_server"][field])): + 
del request_init["logging_server"][field][i][subfield] + else: + del request_init["logging_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_logging_server(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_logging_server_rest_required_fields( + request_type=vmwareengine.CreateLoggingServerRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["logging_server_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "loggingServerId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_logging_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "loggingServerId" in jsonified_request + assert jsonified_request["loggingServerId"] == request_init["logging_server_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["loggingServerId"] = "logging_server_id_value" 
+ + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_logging_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "logging_server_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "loggingServerId" in jsonified_request + assert jsonified_request["loggingServerId"] == "logging_server_id_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_logging_server(request) + + expected_params = [ + ( + "loggingServerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_logging_server_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_logging_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "loggingServerId", + "requestId", + ) + ) + & set( + ( + "parent", + "loggingServer", + "loggingServerId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_logging_server_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_logging_server" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_create_logging_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = vmwareengine.CreateLoggingServerRequest.pb( + vmwareengine.CreateLoggingServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.CreateLoggingServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_logging_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_logging_server_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateLoggingServerRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_logging_server(request) + + +def test_create_logging_server_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + logging_server_id="logging_server_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_logging_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/loggingServers" + % client.transport._host, + args[1], + ) + + +def test_create_logging_server_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_logging_server( + vmwareengine.CreateLoggingServerRequest(), + parent="parent_value", + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + logging_server_id="logging_server_id_value", + ) + + +def test_create_logging_server_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.UpdateLoggingServerRequest, + dict, + ], +) +def test_update_logging_server_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "logging_server": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + } + request_init["logging_server"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "hostname": "hostname_value", + "port": 453, + "protocol": 1, + "source_type": 1, + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateLoggingServerRequest.meta.fields["logging_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["logging_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["logging_server"][field])): + del request_init["logging_server"][field][i][subfield] + else: + 
del request_init["logging_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_logging_server(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_logging_server_rest_required_fields( + request_type=vmwareengine.UpdateLoggingServerRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_logging_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_logging_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_logging_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_logging_server_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_logging_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + 
"loggingServer", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_logging_server_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_logging_server" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_update_logging_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.UpdateLoggingServerRequest.pb( + vmwareengine.UpdateLoggingServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.UpdateLoggingServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_logging_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_logging_server_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateLoggingServerRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy 
transcoding + request_init = { + "logging_server": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_logging_server(request) + + +def test_update_logging_server_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "logging_server": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_logging_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{logging_server.name=projects/*/locations/*/privateClouds/*/loggingServers/*}" + % client.transport._host, + args[1], + ) + + +def test_update_logging_server_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_logging_server( + vmwareengine.UpdateLoggingServerRequest(), + logging_server=vmwareengine_resources.LoggingServer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_logging_server_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.DeleteLoggingServerRequest, + dict, + ], +) +def test_delete_logging_server_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_logging_server(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_logging_server_rest_required_fields( + request_type=vmwareengine.DeleteLoggingServerRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_logging_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_logging_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_logging_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_logging_server_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_logging_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_logging_server_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_delete_logging_server" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_delete_logging_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.DeleteLoggingServerRequest.pb( + vmwareengine.DeleteLoggingServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = vmwareengine.DeleteLoggingServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_logging_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_logging_server_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteLoggingServerRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_logging_server(request) + + +def test_delete_logging_server_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/loggingServers/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_logging_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateClouds/*/loggingServers/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_logging_server_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_logging_server( + vmwareengine.DeleteLoggingServerRequest(), + name="name_value", + ) + + +def test_delete_logging_server_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ListNodeTypesRequest, + dict, + ], +) +def test_list_node_types_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListNodeTypesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListNodeTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_node_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNodeTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_node_types_rest_required_fields( + request_type=vmwareengine.ListNodeTypesRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_node_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_node_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListNodeTypesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListNodeTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_node_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_node_types_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_node_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_node_types_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_list_node_types" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_list_node_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.ListNodeTypesRequest.pb( + vmwareengine.ListNodeTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine.ListNodeTypesResponse.to_json( + vmwareengine.ListNodeTypesResponse() + ) + + request = vmwareengine.ListNodeTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine.ListNodeTypesResponse() + + client.list_node_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_node_types_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListNodeTypesRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_node_types(request) + + +def test_list_node_types_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine.ListNodeTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListNodeTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_node_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/nodeTypes" % client.transport._host, + args[1], + ) + + +def test_list_node_types_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_node_types( + vmwareengine.ListNodeTypesRequest(), + parent="parent_value", + ) + + +def test_list_node_types_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + next_page_token="abc", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[], + next_page_token="def", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNodeTypesResponse( + node_types=[ + vmwareengine_resources.NodeType(), + vmwareengine_resources.NodeType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListNodeTypesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + 
return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_node_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.NodeType) for i in results) + + pages = list(client.list_node_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetNodeTypeRequest, + dict, + ], +) +def test_get_node_type_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/nodeTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine_resources.NodeType( + name="name_value", + node_type_id="node_type_id_value", + display_name="display_name_value", + virtual_cpu_count=1846, + total_core_count=1716, + memory_gb=961, + disk_size_gb=1261, + available_custom_core_counts=[2974], + kind=vmwareengine_resources.NodeType.Kind.STANDARD, + families=["families_value"], + capabilities=[ + vmwareengine_resources.NodeType.Capability.STRETCHED_CLUSTERS + ], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.NodeType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_node_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, vmwareengine_resources.NodeType) + assert response.name == "name_value" + assert response.node_type_id == "node_type_id_value" + assert response.display_name == "display_name_value" + assert response.virtual_cpu_count == 1846 + assert response.total_core_count == 1716 + assert response.memory_gb == 961 + assert response.disk_size_gb == 1261 + assert response.available_custom_core_counts == [2974] + assert response.kind == vmwareengine_resources.NodeType.Kind.STANDARD + assert response.families == ["families_value"] + assert response.capabilities == [ + vmwareengine_resources.NodeType.Capability.STRETCHED_CLUSTERS + ] + + +def test_get_node_type_rest_required_fields( + request_type=vmwareengine.GetNodeTypeRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + 
use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_node_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_node_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.NodeType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.NodeType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_node_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_node_type_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_node_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_node_type_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_get_node_type" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_get_node_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vmwareengine.GetNodeTypeRequest.pb( + vmwareengine.GetNodeTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.NodeType.to_json( + vmwareengine_resources.NodeType() + ) + + request = vmwareengine.GetNodeTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.NodeType() + + client.get_node_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_node_type_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetNodeTypeRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/nodeTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_node_type(request) + + +def test_get_node_type_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vmwareengine_resources.NodeType() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/nodeTypes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.NodeType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_node_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/nodeTypes/*}" % client.transport._host, + args[1], + ) + + +def test_get_node_type_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_node_type( + vmwareengine.GetNodeTypeRequest(), + name="name_value", + ) + + +def test_get_node_type_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ShowNsxCredentialsRequest, + dict, + ], +) +def test_show_nsx_credentials_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Credentials( + username="username_value", + password="password_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Credentials.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.show_nsx_credentials(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vmwareengine_resources.Credentials) + assert response.username == "username_value" + assert response.password == "password_value" + + +def test_show_nsx_credentials_rest_required_fields( + request_type=vmwareengine.ShowNsxCredentialsRequest, +): + transport_class = transports.VmwareEngineRestTransport + + request_init = {} + request_init["private_cloud"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).show_nsx_credentials._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["privateCloud"] = "private_cloud_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).show_nsx_credentials._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "privateCloud" in jsonified_request + assert jsonified_request["privateCloud"] == "private_cloud_value" + + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Credentials() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.Credentials.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.show_nsx_credentials(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_show_nsx_credentials_rest_unset_required_fields(): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.show_nsx_credentials._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("privateCloud",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_show_nsx_credentials_rest_interceptors(null_interceptor): + transport = transports.VmwareEngineRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VmwareEngineRestInterceptor(), + ) + client = VmwareEngineClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_show_nsx_credentials" + ) as post, mock.patch.object( + transports.VmwareEngineRestInterceptor, "pre_show_nsx_credentials" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = vmwareengine.ShowNsxCredentialsRequest.pb( + vmwareengine.ShowNsxCredentialsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vmwareengine_resources.Credentials.to_json( + vmwareengine_resources.Credentials() + ) + + request = vmwareengine.ShowNsxCredentialsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vmwareengine_resources.Credentials() + + client.show_nsx_credentials( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_show_nsx_credentials_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ShowNsxCredentialsRequest +): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.show_nsx_credentials(request) + + +def test_show_nsx_credentials_rest_flattened(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vmwareengine_resources.Credentials() + + # get arguments that satisfy an http rule for this method + sample_request = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + private_cloud="private_cloud_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Credentials.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.show_nsx_credentials(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showNsxCredentials" + % client.transport._host, + args[1], ) -@pytest.mark.asyncio -async def test_list_private_connection_peering_routes_async_pages(): - client = VmwareEngineAsyncClient( - credentials=ga_credentials.AnonymousCredentials, +def test_show_nsx_credentials_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_private_connection_peering_routes), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - ), - RuntimeError, + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.show_nsx_credentials( + vmwareengine.ShowNsxCredentialsRequest(), + private_cloud="private_cloud_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_private_connection_peering_routes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + + +def test_show_nsx_credentials_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListPrivateCloudsRequest, + vmwareengine.ShowVcenterCredentialsRequest, dict, ], ) -def test_list_private_clouds_rest(request_type): +def test_show_vcenter_credentials_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/locations/sample2"} + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListPrivateCloudsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = vmwareengine_resources.Credentials( + username="username_value", + password="password_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateCloudsResponse.pb(return_value) + return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_private_clouds(request) + response = client.show_vcenter_credentials(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListPrivateCloudsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, vmwareengine_resources.Credentials) + assert response.username == "username_value" + assert response.password == "password_value" -def test_list_private_clouds_rest_required_fields( - request_type=vmwareengine.ListPrivateCloudsRequest, +def test_show_vcenter_credentials_rest_required_fields( + request_type=vmwareengine.ShowVcenterCredentialsRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" + request_init["private_cloud"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12355,30 +34533,23 @@ def test_list_private_clouds_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_private_clouds._get_unset_required_fields(jsonified_request) + ).show_vcenter_credentials._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["privateCloud"] = "private_cloud_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_private_clouds._get_unset_required_fields(jsonified_request) + ).show_vcenter_credentials._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("username",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "privateCloud" in jsonified_request + assert jsonified_request["privateCloud"] == "private_cloud_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12387,7 +34558,7 @@ def test_list_private_clouds_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListPrivateCloudsResponse() + return_value = vmwareengine_resources.Credentials() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12408,40 +34579,30 @@ def test_list_private_clouds_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateCloudsResponse.pb(return_value) + return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_private_clouds(request) + response = client.show_vcenter_credentials(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_private_clouds_rest_unset_required_fields(): +def test_show_vcenter_credentials_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.list_private_clouds._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.show_vcenter_credentials._get_unset_required_fields({}) + assert set(unset_fields) == (set(("username",)) & set(("privateCloud",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_private_clouds_rest_interceptors(null_interceptor): +def test_show_vcenter_credentials_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12454,14 +34615,14 @@ def test_list_private_clouds_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_private_clouds" + transports.VmwareEngineRestInterceptor, "post_show_vcenter_credentials" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_private_clouds" + transports.VmwareEngineRestInterceptor, "pre_show_vcenter_credentials" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListPrivateCloudsRequest.pb( - vmwareengine.ListPrivateCloudsRequest() + pb_message = vmwareengine.ShowVcenterCredentialsRequest.pb( + vmwareengine.ShowVcenterCredentialsRequest() ) transcode.return_value = { "method": "post", @@ -12473,19 +34634,19 @@ def test_list_private_clouds_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListPrivateCloudsResponse.to_json( - vmwareengine.ListPrivateCloudsResponse() + req.return_value._content = vmwareengine_resources.Credentials.to_json( + vmwareengine_resources.Credentials() ) - request = vmwareengine.ListPrivateCloudsRequest() + request = 
vmwareengine.ShowVcenterCredentialsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListPrivateCloudsResponse() + post.return_value = vmwareengine_resources.Credentials() - client.list_private_clouds( + client.show_vcenter_credentials( request, metadata=[ ("key", "val"), @@ -12497,8 +34658,8 @@ def test_list_private_clouds_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_private_clouds_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListPrivateCloudsRequest +def test_show_vcenter_credentials_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ShowVcenterCredentialsRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12506,7 +34667,9 @@ def test_list_private_clouds_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12518,10 +34681,10 @@ def test_list_private_clouds_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_private_clouds(request) + client.show_vcenter_credentials(request) -def test_list_private_clouds_rest_flattened(): +def test_show_vcenter_credentials_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12530,172 +34693,106 @@ def test_list_private_clouds_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListPrivateCloudsResponse() + return_value = vmwareengine_resources.Credentials() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + private_cloud="private_cloud_value", ) mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateCloudsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_private_clouds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/privateClouds" - % client.transport._host, - args[1], - ) - - -def test_list_private_clouds_rest_flattened_error(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_private_clouds( - vmwareengine.ListPrivateCloudsRequest(), - parent="parent_value", - ) - - -def test_list_private_clouds_rest_pager(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vmwareengine.ListPrivateCloudsResponse( - private_clouds=[ - vmwareengine_resources.PrivateCloud(), - vmwareengine_resources.PrivateCloud(), - vmwareengine_resources.PrivateCloud(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateCloudsResponse( - private_clouds=[], - next_page_token="def", - ), - vmwareengine.ListPrivateCloudsResponse( - private_clouds=[ - vmwareengine_resources.PrivateCloud(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateCloudsResponse( - private_clouds=[ - vmwareengine_resources.PrivateCloud(), - vmwareengine_resources.PrivateCloud(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - vmwareengine.ListPrivateCloudsResponse.to_json(x) for x in response + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.Credentials.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.show_vcenter_credentials(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showVcenterCredentials" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_private_clouds(request=sample_request) +def test_show_vcenter_credentials_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.PrivateCloud) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.show_vcenter_credentials( + vmwareengine.ShowVcenterCredentialsRequest(), + private_cloud="private_cloud_value", + ) - pages = list(client.list_private_clouds(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_show_vcenter_credentials_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetPrivateCloudRequest, + vmwareengine.ResetNsxCredentialsRequest, dict, ], ) -def test_get_private_cloud_rest(request_type): +def test_reset_nsx_credentials_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.PrivateCloud( - name="name_value", - state=vmwareengine_resources.PrivateCloud.State.ACTIVE, - description="description_value", - uid="uid_value", - type_=vmwareengine_resources.PrivateCloud.Type.TIME_LIMITED, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.PrivateCloud.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_private_cloud(request) + response = client.reset_nsx_credentials(request) # Establish that the response is the type that we expect. - assert isinstance(response, vmwareengine_resources.PrivateCloud) - assert response.name == "name_value" - assert response.state == vmwareengine_resources.PrivateCloud.State.ACTIVE - assert response.description == "description_value" - assert response.uid == "uid_value" - assert response.type_ == vmwareengine_resources.PrivateCloud.Type.TIME_LIMITED + assert response.operation.name == "operations/spam" -def test_get_private_cloud_rest_required_fields( - request_type=vmwareengine.GetPrivateCloudRequest, +def test_reset_nsx_credentials_rest_required_fields( + request_type=vmwareengine.ResetNsxCredentialsRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" + request_init["private_cloud"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12710,21 +34807,21 @@ def test_get_private_cloud_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_private_cloud._get_unset_required_fields(jsonified_request) + 
).reset_nsx_credentials._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["privateCloud"] = "private_cloud_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_private_cloud._get_unset_required_fields(jsonified_request) + ).reset_nsx_credentials._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "privateCloud" in jsonified_request + assert jsonified_request["privateCloud"] == "private_cloud_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12733,7 +34830,7 @@ def test_get_private_cloud_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.PrivateCloud() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12745,39 +34842,37 @@ def test_get_private_cloud_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.PrivateCloud.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_private_cloud(request) + response = client.reset_nsx_credentials(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_private_cloud_rest_unset_required_fields(): +def test_reset_nsx_credentials_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_private_cloud._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.reset_nsx_credentials._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("privateCloud",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_private_cloud_rest_interceptors(null_interceptor): +def test_reset_nsx_credentials_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12790,14 +34885,16 @@ def test_get_private_cloud_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.VmwareEngineRestInterceptor, "post_get_private_cloud" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_reset_nsx_credentials" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_private_cloud" + transports.VmwareEngineRestInterceptor, "pre_reset_nsx_credentials" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetPrivateCloudRequest.pb( - vmwareengine.GetPrivateCloudRequest() + pb_message = vmwareengine.ResetNsxCredentialsRequest.pb( + vmwareengine.ResetNsxCredentialsRequest() ) transcode.return_value = { "method": "post", @@ -12809,19 +34906,19 @@ def test_get_private_cloud_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.PrivateCloud.to_json( - vmwareengine_resources.PrivateCloud() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.GetPrivateCloudRequest() + request = vmwareengine.ResetNsxCredentialsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.PrivateCloud() + post.return_value = operations_pb2.Operation() - client.get_private_cloud( + client.reset_nsx_credentials( request, metadata=[ ("key", "val"), @@ -12833,8 +34930,8 @@ def test_get_private_cloud_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_private_cloud_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetPrivateCloudRequest +def test_reset_nsx_credentials_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ResetNsxCredentialsRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12842,7 +34939,9 @@ def 
test_get_private_cloud_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12854,10 +34953,10 @@ def test_get_private_cloud_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_private_cloud(request) + client.reset_nsx_credentials(request) -def test_get_private_cloud_rest_flattened(): +def test_reset_nsx_credentials_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12866,42 +34965,40 @@ def test_get_private_cloud_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.PrivateCloud() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3" + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + private_cloud="private_cloud_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.PrivateCloud.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_private_cloud(**mock_args) + client.reset_nsx_credentials(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*}" + "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetNsxCredentials" % client.transport._host, args[1], ) -def test_get_private_cloud_rest_flattened_error(transport: str = "rest"): +def test_reset_nsx_credentials_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12910,139 +35007,35 @@ def test_get_private_cloud_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_private_cloud( - vmwareengine.GetPrivateCloudRequest(), - name="name_value", - ) - - -def test_get_private_cloud_rest_error(): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - vmwareengine.CreatePrivateCloudRequest, - dict, - ], -) -def test_create_private_cloud_rest(request_type): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["private_cloud"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "expire_time": {}, - "state": 1, - "network_config": { - "management_cidr": "management_cidr_value", - "vmware_engine_network": "vmware_engine_network_value", - "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", - "management_ip_address_layout_version": 3836, - }, - "management_cluster": { - "cluster_id": "cluster_id_value", - "node_type_configs": {}, - }, - "description": "description_value", - "hcx": { - "internal_ip": "internal_ip_value", - "version": "version_value", - "state": 1, - "fqdn": "fqdn_value", - }, - "nsx": { - "internal_ip": "internal_ip_value", - "version": "version_value", - "state": 1, - "fqdn": "fqdn_value", - }, - "vcenter": { - "internal_ip": "internal_ip_value", - "version": "version_value", - "state": 1, - "fqdn": "fqdn_value", - }, - "uid": "uid_value", - "type_": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.CreatePrivateCloudRequest.meta.fields["private_cloud"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.reset_nsx_credentials( + vmwareengine.ResetNsxCredentialsRequest(), + private_cloud="private_cloud_value", + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["private_cloud"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_reset_nsx_credentials_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if 
(field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["private_cloud"][field])): - del request_init["private_cloud"][field][i][subfield] - else: - del request_init["private_cloud"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.ResetVcenterCredentialsRequest, + dict, + ], +) +def test_reset_vcenter_credentials_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13057,20 +35050,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_private_cloud(request) + response = client.reset_vcenter_credentials(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_private_cloud_rest_required_fields( - request_type=vmwareengine.CreatePrivateCloudRequest, +def test_reset_vcenter_credentials_rest_required_fields( + request_type=vmwareengine.ResetVcenterCredentialsRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" - request_init["private_cloud_id"] = "" + request_init["private_cloud"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13082,38 +35074,24 @@ def test_create_private_cloud_rest_required_fields( ) # verify fields with default values are dropped - assert "privateCloudId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_private_cloud._get_unset_required_fields(jsonified_request) + ).reset_vcenter_credentials._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "privateCloudId" in jsonified_request - assert jsonified_request["privateCloudId"] == request_init["private_cloud_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["privateCloudId"] = "private_cloud_id_value" + jsonified_request["privateCloud"] = "private_cloud_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_private_cloud._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "private_cloud_id", - "request_id", - "validate_only", - ) - ) + ).reset_vcenter_credentials._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "privateCloudId" in jsonified_request - assert jsonified_request["privateCloudId"] == "private_cloud_id_value" + assert "privateCloud" in jsonified_request + assert jsonified_request["privateCloud"] == "private_cloud_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13147,45 +35125,24 @@ def test_create_private_cloud_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_private_cloud(request) + response = client.reset_vcenter_credentials(request) - expected_params = [ - ( - "privateCloudId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_private_cloud_rest_unset_required_fields(): +def test_reset_vcenter_credentials_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_private_cloud._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "privateCloudId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "privateCloudId", - "privateCloud", - ) - ) - ) + unset_fields = transport.reset_vcenter_credentials._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("privateCloud",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_private_cloud_rest_interceptors(null_interceptor): +def 
test_reset_vcenter_credentials_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13200,14 +35157,14 @@ def test_create_private_cloud_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_create_private_cloud" + transports.VmwareEngineRestInterceptor, "post_reset_vcenter_credentials" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_create_private_cloud" + transports.VmwareEngineRestInterceptor, "pre_reset_vcenter_credentials" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.CreatePrivateCloudRequest.pb( - vmwareengine.CreatePrivateCloudRequest() + pb_message = vmwareengine.ResetVcenterCredentialsRequest.pb( + vmwareengine.ResetVcenterCredentialsRequest() ) transcode.return_value = { "method": "post", @@ -13223,7 +35180,7 @@ def test_create_private_cloud_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.CreatePrivateCloudRequest() + request = vmwareengine.ResetVcenterCredentialsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13231,7 +35188,7 @@ def test_create_private_cloud_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_private_cloud( + client.reset_vcenter_credentials( request, metadata=[ ("key", "val"), @@ -13243,8 +35200,8 @@ def test_create_private_cloud_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_private_cloud_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.CreatePrivateCloudRequest +def test_reset_vcenter_credentials_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ResetVcenterCredentialsRequest ): client = VmwareEngineClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -13252,7 +35209,9 @@ def test_create_private_cloud_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13264,10 +35223,10 @@ def test_create_private_cloud_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_private_cloud(request) + client.reset_vcenter_credentials(request) -def test_create_private_cloud_rest_flattened(): +def test_reset_vcenter_credentials_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13279,210 +35238,106 @@ def test_create_private_cloud_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), - private_cloud_id="private_cloud_id_value", + private_cloud="private_cloud_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_private_cloud(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/privateClouds" - % client.transport._host, - args[1], - ) - - -def test_create_private_cloud_rest_flattened_error(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_private_cloud( - vmwareengine.CreatePrivateCloudRequest(), - parent="parent_value", - private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), - private_cloud_id="private_cloud_id_value", - ) - - -def test_create_private_cloud_rest_error(): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - vmwareengine.UpdatePrivateCloudRequest, - dict, - ], -) -def test_update_private_cloud_rest(request_type): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "private_cloud": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3" - } - } - request_init["private_cloud"] = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "delete_time": {}, - "expire_time": {}, - "state": 1, - "network_config": { - "management_cidr": "management_cidr_value", - "vmware_engine_network": "vmware_engine_network_value", - "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", - "management_ip_address_layout_version": 3836, - }, - "management_cluster": { - "cluster_id": "cluster_id_value", - "node_type_configs": {}, - }, - "description": "description_value", - "hcx": { - "internal_ip": 
"internal_ip_value", - "version": "version_value", - "state": 1, - "fqdn": "fqdn_value", - }, - "nsx": { - "internal_ip": "internal_ip_value", - "version": "version_value", - "state": 1, - "fqdn": "fqdn_value", - }, - "vcenter": { - "internal_ip": "internal_ip_value", - "version": "version_value", - "state": 1, - "fqdn": "fqdn_value", - }, - "uid": "uid_value", - "type_": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.UpdatePrivateCloudRequest.meta.fields["private_cloud"] + client.reset_vcenter_credentials(**mock_args) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetVcenterCredentials" + % client.transport._host, + args[1], + ) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields +def test_reset_vcenter_credentials_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reset_vcenter_credentials( + vmwareengine.ResetVcenterCredentialsRequest(), + private_cloud="private_cloud_value", + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["private_cloud"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_reset_vcenter_credentials_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["private_cloud"][field])): - del request_init["private_cloud"][field][i][subfield] - else: - del request_init["private_cloud"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.GetDnsForwardingRequest, + dict, + ], +) +def test_get_dns_forwarding_rest(request_type): + client = VmwareEngineClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.DnsForwarding( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.DnsForwarding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_private_cloud(request) + response = client.get_dns_forwarding(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, vmwareengine_resources.DnsForwarding) + assert response.name == "name_value" -def test_update_private_cloud_rest_required_fields( - request_type=vmwareengine.UpdatePrivateCloudRequest, +def test_get_dns_forwarding_rest_required_fields( + request_type=vmwareengine.GetDnsForwardingRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13497,24 +35352,21 @@ def test_update_private_cloud_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_private_cloud._get_unset_required_fields(jsonified_request) + ).get_dns_forwarding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_private_cloud._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + ).get_dns_forwarding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13523,7 +35375,7 @@ def test_update_private_cloud_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.DnsForwarding() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13535,50 +35387,39 @@ def test_update_private_cloud_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.DnsForwarding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_private_cloud(request) + response = client.get_dns_forwarding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_private_cloud_rest_unset_required_fields(): +def test_get_dns_forwarding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_private_cloud._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "privateCloud", - "updateMask", - ) - ) - ) + unset_fields = transport.get_dns_forwarding._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_private_cloud_rest_interceptors(null_interceptor): +def test_get_dns_forwarding_rest_interceptors(null_interceptor): transport = 
transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13591,16 +35432,14 @@ def test_update_private_cloud_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_update_private_cloud" + transports.VmwareEngineRestInterceptor, "post_get_dns_forwarding" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_update_private_cloud" + transports.VmwareEngineRestInterceptor, "pre_get_dns_forwarding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UpdatePrivateCloudRequest.pb( - vmwareengine.UpdatePrivateCloudRequest() + pb_message = vmwareengine.GetDnsForwardingRequest.pb( + vmwareengine.GetDnsForwardingRequest() ) transcode.return_value = { "method": "post", @@ -13612,19 +35451,19 @@ def test_update_private_cloud_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = vmwareengine_resources.DnsForwarding.to_json( + vmwareengine_resources.DnsForwarding() ) - request = vmwareengine.UpdatePrivateCloudRequest() + request = vmwareengine.GetDnsForwardingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine_resources.DnsForwarding() - client.update_private_cloud( + client.get_dns_forwarding( request, metadata=[ ("key", "val"), @@ -13636,8 +35475,8 @@ def test_update_private_cloud_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_private_cloud_rest_bad_request( - transport: str = "rest", 
request_type=vmwareengine.UpdatePrivateCloudRequest +def test_get_dns_forwarding_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetDnsForwardingRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13646,9 +35485,7 @@ def test_update_private_cloud_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "private_cloud": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3" - } + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding" } request = request_type(**request_init) @@ -13661,10 +35498,10 @@ def test_update_private_cloud_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_private_cloud(request) + client.get_dns_forwarding(request) -def test_update_private_cloud_rest_flattened(): +def test_get_dns_forwarding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13673,43 +35510,42 @@ def test_update_private_cloud_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.DnsForwarding() # get arguments that satisfy an http rule for this method sample_request = { - "private_cloud": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3" - } + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding" } # get truthy value for each flattened field mock_args = dict( - private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.DnsForwarding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_private_cloud(**mock_args) + client.get_dns_forwarding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{private_cloud.name=projects/*/locations/*/privateClouds/*}" + "%s/v1/{name=projects/*/locations/*/privateClouds/*/dnsForwarding}" % client.transport._host, args[1], ) -def test_update_private_cloud_rest_flattened_error(transport: str = "rest"): +def test_get_dns_forwarding_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13718,14 +35554,13 @@ def test_update_private_cloud_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_private_cloud( - vmwareengine.UpdatePrivateCloudRequest(), - private_cloud=vmwareengine_resources.PrivateCloud(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_dns_forwarding( + vmwareengine.GetDnsForwardingRequest(), + name="name_value", ) -def test_update_private_cloud_rest_error(): +def test_get_dns_forwarding_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13734,18 +35569,100 @@ def test_update_private_cloud_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeletePrivateCloudRequest, + vmwareengine.UpdateDnsForwardingRequest, dict, ], ) -def test_delete_private_cloud_rest(request_type): +def test_update_dns_forwarding_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request_init = { + "dns_forwarding": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding" + } + } + request_init["dns_forwarding"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "forwarding_rules": [ + { + "domain": "domain_value", + "name_servers": ["name_servers_value1", "name_servers_value2"], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateDnsForwardingRequest.meta.fields["dns_forwarding"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dns_forwarding"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dns_forwarding"][field])): + del request_init["dns_forwarding"][field][i][subfield] + else: + del request_init["dns_forwarding"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -13760,19 +35677,18 @@ def test_delete_private_cloud_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_private_cloud(request) + response = client.update_dns_forwarding(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_private_cloud_rest_required_fields( - request_type=vmwareengine.DeletePrivateCloudRequest, +def test_update_dns_forwarding_rest_required_fields( + request_type=vmwareengine.UpdateDnsForwardingRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13787,29 +35703,24 @@ def test_delete_private_cloud_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_private_cloud._get_unset_required_fields(jsonified_request) + ).update_dns_forwarding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).delete_private_cloud._get_unset_required_fields(jsonified_request) + ).update_dns_forwarding._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "delay_hours", - "force", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13830,9 +35741,10 @@ def test_delete_private_cloud_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13842,33 +35754,37 @@ def test_delete_private_cloud_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_private_cloud(request) + response = client.update_dns_forwarding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_private_cloud_rest_unset_required_fields(): +def test_update_dns_forwarding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_private_cloud._get_unset_required_fields({}) + unset_fields = transport.update_dns_forwarding._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "delayHours", - "force", "requestId", + "updateMask", + ) + ) + & set( + ( + "dnsForwarding", + "updateMask", ) ) - & set(("name",)) ) @pytest.mark.parametrize("null_interceptor", 
[True, False]) -def test_delete_private_cloud_rest_interceptors(null_interceptor): +def test_update_dns_forwarding_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13883,14 +35799,14 @@ def test_delete_private_cloud_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_delete_private_cloud" + transports.VmwareEngineRestInterceptor, "post_update_dns_forwarding" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_delete_private_cloud" + transports.VmwareEngineRestInterceptor, "pre_update_dns_forwarding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.DeletePrivateCloudRequest.pb( - vmwareengine.DeletePrivateCloudRequest() + pb_message = vmwareengine.UpdateDnsForwardingRequest.pb( + vmwareengine.UpdateDnsForwardingRequest() ) transcode.return_value = { "method": "post", @@ -13906,7 +35822,7 @@ def test_delete_private_cloud_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.DeletePrivateCloudRequest() + request = vmwareengine.UpdateDnsForwardingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13914,7 +35830,7 @@ def test_delete_private_cloud_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_private_cloud( + client.update_dns_forwarding( request, metadata=[ ("key", "val"), @@ -13926,8 +35842,8 @@ def test_delete_private_cloud_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_private_cloud_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.DeletePrivateCloudRequest +def test_update_dns_forwarding_rest_bad_request( + transport: str = "rest", 
request_type=vmwareengine.UpdateDnsForwardingRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13935,7 +35851,11 @@ def test_delete_private_cloud_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request_init = { + "dns_forwarding": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13947,10 +35867,10 @@ def test_delete_private_cloud_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_private_cloud(request) + client.update_dns_forwarding(request) -def test_delete_private_cloud_rest_flattened(): +def test_update_dns_forwarding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13963,12 +35883,15 @@ def test_delete_private_cloud_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3" + "dns_forwarding": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/dnsForwarding" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + dns_forwarding=vmwareengine_resources.DnsForwarding(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -13979,20 +35902,20 @@ def test_delete_private_cloud_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_private_cloud(**mock_args) + client.update_dns_forwarding(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*}" + "%s/v1/{dns_forwarding.name=projects/*/locations/*/privateClouds/*/dnsForwarding}" % client.transport._host, args[1], ) -def test_delete_private_cloud_rest_flattened_error(transport: str = "rest"): +def test_update_dns_forwarding_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14001,13 +35924,14 @@ def test_delete_private_cloud_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_private_cloud( - vmwareengine.DeletePrivateCloudRequest(), - name="name_value", + client.update_dns_forwarding( + vmwareengine.UpdateDnsForwardingRequest(), + dns_forwarding=vmwareengine_resources.DnsForwarding(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_private_cloud_rest_error(): +def test_update_dns_forwarding_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14016,40 +35940,76 @@ def test_delete_private_cloud_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.UndeletePrivateCloudRequest, + vmwareengine.GetNetworkPeeringRequest, dict, ], ) -def test_undelete_private_cloud_rest(request_type): +def test_get_network_peering_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.NetworkPeering( + name="name_value", + peer_network="peer_network_value", + export_custom_routes=True, + import_custom_routes=True, + exchange_subnet_routes=True, + export_custom_routes_with_public_ip=True, + import_custom_routes_with_public_ip=True, + state=vmwareengine_resources.NetworkPeering.State.INACTIVE, + state_details="state_details_value", + peer_mtu=865, + peer_network_type=vmwareengine_resources.NetworkPeering.PeerNetworkType.STANDARD, + uid="uid_value", + vmware_engine_network="vmware_engine_network_value", + description="description_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.NetworkPeering.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undelete_private_cloud(request) + response = client.get_network_peering(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, vmwareengine_resources.NetworkPeering) + assert response.name == "name_value" + assert response.peer_network == "peer_network_value" + assert response.export_custom_routes is True + assert response.import_custom_routes is True + assert response.exchange_subnet_routes is True + assert response.export_custom_routes_with_public_ip is True + assert response.import_custom_routes_with_public_ip is True + assert response.state == vmwareengine_resources.NetworkPeering.State.INACTIVE + assert response.state_details == "state_details_value" + assert response.peer_mtu == 865 + assert ( + response.peer_network_type + == vmwareengine_resources.NetworkPeering.PeerNetworkType.STANDARD + ) + assert response.uid == "uid_value" + assert response.vmware_engine_network == "vmware_engine_network_value" + assert response.description == "description_value" -def test_undelete_private_cloud_rest_required_fields( - request_type=vmwareengine.UndeletePrivateCloudRequest, +def test_get_network_peering_rest_required_fields( + request_type=vmwareengine.GetNetworkPeeringRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -14069,7 +36029,7 @@ def test_undelete_private_cloud_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undelete_private_cloud._get_unset_required_fields(jsonified_request) + ).get_network_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14078,7 +36038,7 @@ def test_undelete_private_cloud_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undelete_private_cloud._get_unset_required_fields(jsonified_request) + ).get_network_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values 
are left alone @@ -14092,7 +36052,7 @@ def test_undelete_private_cloud_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.NetworkPeering() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14104,37 +36064,39 @@ def test_undelete_private_cloud_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.NetworkPeering.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undelete_private_cloud(request) + response = client.get_network_peering(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_undelete_private_cloud_rest_unset_required_fields(): +def test_get_network_peering_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.undelete_private_cloud._get_unset_required_fields({}) + unset_fields = transport.get_network_peering._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undelete_private_cloud_rest_interceptors(null_interceptor): +def 
test_get_network_peering_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14147,16 +36109,14 @@ def test_undelete_private_cloud_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_undelete_private_cloud" + transports.VmwareEngineRestInterceptor, "post_get_network_peering" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_undelete_private_cloud" + transports.VmwareEngineRestInterceptor, "pre_get_network_peering" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UndeletePrivateCloudRequest.pb( - vmwareengine.UndeletePrivateCloudRequest() + pb_message = vmwareengine.GetNetworkPeeringRequest.pb( + vmwareengine.GetNetworkPeeringRequest() ) transcode.return_value = { "method": "post", @@ -14168,19 +36128,19 @@ def test_undelete_private_cloud_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = vmwareengine_resources.NetworkPeering.to_json( + vmwareengine_resources.NetworkPeering() ) - request = vmwareengine.UndeletePrivateCloudRequest() + request = vmwareengine.GetNetworkPeeringRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine_resources.NetworkPeering() - client.undelete_private_cloud( + client.get_network_peering( request, metadata=[ ("key", "val"), @@ -14192,8 +36152,8 @@ def test_undelete_private_cloud_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_undelete_private_cloud_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.UndeletePrivateCloudRequest +def test_get_network_peering_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetNetworkPeeringRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14201,7 +36161,9 @@ def test_undelete_private_cloud_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/privateClouds/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14213,10 +36175,10 @@ def test_undelete_private_cloud_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.undelete_private_cloud(request) + client.get_network_peering(request) -def test_undelete_private_cloud_rest_flattened(): +def test_get_network_peering_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14225,11 +36187,11 @@ def test_undelete_private_cloud_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.NetworkPeering() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } # get truthy value for each flattened field @@ -14241,24 +36203,26 @@ def test_undelete_private_cloud_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.NetworkPeering.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.undelete_private_cloud(**mock_args) + client.get_network_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*}:undelete" + "%s/v1/{name=projects/*/locations/*/networkPeerings/*}" % client.transport._host, args[1], ) -def test_undelete_private_cloud_rest_flattened_error(transport: str = "rest"): +def test_get_network_peering_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14267,13 +36231,13 @@ def test_undelete_private_cloud_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.undelete_private_cloud( - vmwareengine.UndeletePrivateCloudRequest(), + client.get_network_peering( + vmwareengine.GetNetworkPeeringRequest(), name="name_value", ) -def test_undelete_private_cloud_rest_error(): +def test_get_network_peering_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14282,26 +36246,24 @@ def test_undelete_private_cloud_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListClustersRequest, + vmwareengine.ListNetworkPeeringsRequest, dict, ], ) -def test_list_clusters_rest(request_type): +def test_list_network_peerings_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListClustersResponse( + return_value = vmwareengine.ListNetworkPeeringsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -14310,21 +36272,21 @@ def test_list_clusters_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListClustersResponse.pb(return_value) + return_value = vmwareengine.ListNetworkPeeringsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.list_network_peerings(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClustersPager) + assert isinstance(response, pagers.ListNetworkPeeringsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_clusters_rest_required_fields( - request_type=vmwareengine.ListClustersRequest, +def test_list_network_peerings_rest_required_fields( + request_type=vmwareengine.ListNetworkPeeringsRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -14344,7 +36306,7 @@ def test_list_clusters_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).list_network_peerings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14353,7 +36315,7 @@ def test_list_clusters_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_clusters._get_unset_required_fields(jsonified_request) + ).list_network_peerings._get_unset_required_fields(jsonified_request) # Check that path 
parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -14376,7 +36338,7 @@ def test_list_clusters_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListClustersResponse() + return_value = vmwareengine.ListNetworkPeeringsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14397,25 +36359,25 @@ def test_list_clusters_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListClustersResponse.pb(return_value) + return_value = vmwareengine.ListNetworkPeeringsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_clusters(request) + response = client.list_network_peerings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_clusters_rest_unset_required_fields(): +def test_list_network_peerings_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_clusters._get_unset_required_fields({}) + unset_fields = transport.list_network_peerings._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -14430,7 +36392,7 @@ def test_list_clusters_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_clusters_rest_interceptors(null_interceptor): +def test_list_network_peerings_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14443,14 +36405,14 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_clusters" + transports.VmwareEngineRestInterceptor, "post_list_network_peerings" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_clusters" + transports.VmwareEngineRestInterceptor, "pre_list_network_peerings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListClustersRequest.pb( - vmwareengine.ListClustersRequest() + pb_message = vmwareengine.ListNetworkPeeringsRequest.pb( + vmwareengine.ListNetworkPeeringsRequest() ) transcode.return_value = { "method": "post", @@ -14462,19 +36424,19 @@ def test_list_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListClustersResponse.to_json( - vmwareengine.ListClustersResponse() + req.return_value._content = vmwareengine.ListNetworkPeeringsResponse.to_json( + vmwareengine.ListNetworkPeeringsResponse() ) - request = vmwareengine.ListClustersRequest() + request = vmwareengine.ListNetworkPeeringsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListClustersResponse() + post.return_value = vmwareengine.ListNetworkPeeringsResponse() - client.list_clusters( + client.list_network_peerings( request, metadata=[ ("key", "val"), @@ -14486,8 +36448,8 @@ def test_list_clusters_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_clusters_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListClustersRequest +def test_list_network_peerings_rest_bad_request( + transport: str = "rest", 
request_type=vmwareengine.ListNetworkPeeringsRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14495,9 +36457,7 @@ def test_list_clusters_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14509,10 +36469,10 @@ def test_list_clusters_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_clusters(request) + client.list_network_peerings(request) -def test_list_clusters_rest_flattened(): +def test_list_network_peerings_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14521,12 +36481,10 @@ def test_list_clusters_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListClustersResponse() + return_value = vmwareengine.ListNetworkPeeringsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -14538,25 +36496,25 @@ def test_list_clusters_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListClustersResponse.pb(return_value) + return_value = vmwareengine.ListNetworkPeeringsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_clusters(**mock_args) + client.list_network_peerings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters" + "%s/v1/{parent=projects/*/locations/*}/networkPeerings" % client.transport._host, args[1], ) -def test_list_clusters_rest_flattened_error(transport: str = "rest"): +def test_list_network_peerings_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14565,13 +36523,13 @@ def test_list_clusters_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_clusters( - vmwareengine.ListClustersRequest(), + client.list_network_peerings( + vmwareengine.ListNetworkPeeringsRequest(), parent="parent_value", ) -def test_list_clusters_rest_pager(transport: str = "rest"): +def test_list_network_peerings_rest_pager(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14583,28 +36541,28 @@ def test_list_clusters_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - vmwareengine.ListClustersResponse( - clusters=[ - vmwareengine_resources.Cluster(), - vmwareengine_resources.Cluster(), - vmwareengine_resources.Cluster(), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), ], next_page_token="abc", ), - vmwareengine.ListClustersResponse( - clusters=[], + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[], next_page_token="def", ), - vmwareengine.ListClustersResponse( - clusters=[ - vmwareengine_resources.Cluster(), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), ], next_page_token="ghi", ), - vmwareengine.ListClustersResponse( - clusters=[ - vmwareengine_resources.Cluster(), - vmwareengine_resources.Cluster(), + vmwareengine.ListNetworkPeeringsResponse( + network_peerings=[ + vmwareengine_resources.NetworkPeering(), + vmwareengine_resources.NetworkPeering(), ], ), ) @@ -14612,24 +36570,26 @@ def test_list_clusters_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(vmwareengine.ListClustersResponse.to_json(x) for x in response) + response = tuple( + vmwareengine.ListNetworkPeeringsResponse.to_json(x) for x in response 
+ ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_clusters(request=sample_request) + pager = client.list_network_peerings(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.Cluster) for i in results) + assert all( + isinstance(i, vmwareengine_resources.NetworkPeering) for i in results + ) - pages = list(client.list_clusters(request=sample_request).pages) + pages = list(client.list_network_peerings(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -14637,56 +36597,131 @@ def test_list_clusters_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetClusterRequest, + vmwareengine.CreateNetworkPeeringRequest, dict, ], ) -def test_get_cluster_rest(request_type): +def test_create_network_peering_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["network_peering"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "peer_network": "peer_network_value", + "export_custom_routes": True, + "import_custom_routes": True, + "exchange_subnet_routes": True, + "export_custom_routes_with_public_ip": True, + "import_custom_routes_with_public_ip": True, + "state": 1, + 
"state_details": "state_details_value", + "peer_mtu": 865, + "peer_network_type": 1, + "uid": "uid_value", + "vmware_engine_network": "vmware_engine_network_value", + "description": "description_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateNetworkPeeringRequest.meta.fields["network_peering"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_peering"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and 
hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_peering"][field])): + del request_init["network_peering"][field][i][subfield] + else: + del request_init["network_peering"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Cluster( - name="name_value", - state=vmwareengine_resources.Cluster.State.ACTIVE, - management=True, - uid="uid_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.create_network_peering(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Cluster) - assert response.name == "name_value" - assert response.state == vmwareengine_resources.Cluster.State.ACTIVE - assert response.management is True - assert response.uid == "uid_value" + assert response.operation.name == "operations/spam" -def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRequest): +def test_create_network_peering_rest_required_fields( + request_type=vmwareengine.CreateNetworkPeeringRequest, +): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["network_peering_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14698,24 +36733,37 @@ def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRe ) # verify fields with default values are dropped + assert "networkPeeringId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).create_network_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "networkPeeringId" in jsonified_request + assert jsonified_request["networkPeeringId"] == request_init["network_peering_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["networkPeeringId"] = "network_peering_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_cluster._get_unset_required_fields(jsonified_request) + ).create_network_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "network_peering_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "networkPeeringId" in jsonified_request + assert jsonified_request["networkPeeringId"] == "network_peering_id_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14724,7 +36772,7 @@ def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14736,39 +36784,57 @@ def test_get_cluster_rest_required_fields(request_type=vmwareengine.GetClusterRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_cluster(request) + response = client.create_network_peering(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "networkPeeringId", + "", + ), + ("$alt", "json;enum-encoding=int"), + 
] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_cluster_rest_unset_required_fields(): +def test_create_network_peering_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_network_peering._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "networkPeeringId", + "requestId", + ) + ) + & set( + ( + "parent", + "networkPeeringId", + "networkPeering", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cluster_rest_interceptors(null_interceptor): +def test_create_network_peering_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14781,13 +36847,17 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_cluster" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_network_peering" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_cluster" + transports.VmwareEngineRestInterceptor, "pre_create_network_peering" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetClusterRequest.pb(vmwareengine.GetClusterRequest()) + pb_message = vmwareengine.CreateNetworkPeeringRequest.pb( + vmwareengine.CreateNetworkPeeringRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14798,19 +36868,19 @@ def test_get_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.Cluster.to_json( - vmwareengine_resources.Cluster() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.GetClusterRequest() + request = vmwareengine.CreateNetworkPeeringRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.Cluster() + post.return_value = operations_pb2.Operation() - client.get_cluster( + client.create_network_peering( request, metadata=[ ("key", "val"), @@ -14822,8 +36892,8 @@ def test_get_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_cluster_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetClusterRequest +def test_create_network_peering_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateNetworkPeeringRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14831,9 +36901,7 @@ def test_get_cluster_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -14845,10 +36913,10 @@ def test_get_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_cluster(request) + client.create_network_peering(request) -def test_get_cluster_rest_flattened(): +def test_create_network_peering_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14857,42 +36925,40 @@ def test_get_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Cluster() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + network_peering_id="network_peering_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.Cluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_cluster(**mock_args) + client.create_network_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}" + "%s/v1/{parent=projects/*/locations/*}/networkPeerings" % client.transport._host, args[1], ) -def test_get_cluster_rest_flattened_error(transport: str = "rest"): +def test_create_network_peering_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14901,13 +36967,15 @@ def test_get_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_cluster( - vmwareengine.GetClusterRequest(), - name="name_value", + client.create_network_peering( + vmwareengine.CreateNetworkPeeringRequest(), + parent="parent_value", + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), + network_peering_id="network_peering_id_value", ) -def test_get_cluster_rest_error(): +def test_create_network_peering_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -14916,11 +36984,11 @@ def test_get_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreateClusterRequest, + vmwareengine.DeleteNetworkPeeringRequest, dict, ], ) -def test_create_cluster_rest(request_type): +def test_delete_network_peering_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14928,84 +36996,8 @@ def test_create_cluster_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } - request_init["cluster"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "management": True, - "uid": "uid_value", 
- "node_type_configs": {}, + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.CreateClusterRequest.meta.fields["cluster"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - 
subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del request_init["cluster"][field][i][subfield] - else: - del request_init["cluster"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -15020,20 +37012,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.delete_network_peering(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_cluster_rest_required_fields( - request_type=vmwareengine.CreateClusterRequest, +def test_delete_network_peering_rest_required_fields( + request_type=vmwareengine.DeleteNetworkPeeringRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" - request_init["cluster_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15045,38 +37036,26 @@ def test_create_cluster_rest_required_fields( ) # verify fields with default values are dropped - assert "clusterId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).delete_network_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == request_init["cluster_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["clusterId"] = "cluster_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_cluster._get_unset_required_fields(jsonified_request) + ).delete_network_peering._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "cluster_id", - "request_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clusterId" in jsonified_request - assert jsonified_request["clusterId"] == "cluster_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15097,10 +37076,9 @@ def test_create_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15110,45 +37088,24 @@ def test_create_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_cluster(request) + response = client.delete_network_peering(request) - expected_params = [ - ( - "clusterId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_cluster_rest_unset_required_fields(): +def test_delete_network_peering_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_cluster._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "clusterId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "clusterId", - "cluster", - ) - ) - ) + unset_fields = transport.delete_network_peering._get_unset_required_fields({}) 
+ assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_cluster_rest_interceptors(null_interceptor): +def test_delete_network_peering_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15163,14 +37120,14 @@ def test_create_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_create_cluster" + transports.VmwareEngineRestInterceptor, "post_delete_network_peering" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_create_cluster" + transports.VmwareEngineRestInterceptor, "pre_delete_network_peering" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = vmwareengine.CreateClusterRequest.pb( - vmwareengine.CreateClusterRequest() + post.assert_not_called() + pb_message = vmwareengine.DeleteNetworkPeeringRequest.pb( + vmwareengine.DeleteNetworkPeeringRequest() ) transcode.return_value = { "method": "post", @@ -15186,7 +37143,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.CreateClusterRequest() + request = vmwareengine.DeleteNetworkPeeringRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -15194,7 +37151,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_cluster( + client.delete_network_peering( request, metadata=[ ("key", "val"), @@ -15206,8 +37163,8 @@ def test_create_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_cluster_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.CreateClusterRequest +def 
test_delete_network_peering_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteNetworkPeeringRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15216,7 +37173,7 @@ def test_create_cluster_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } request = request_type(**request_init) @@ -15229,10 +37186,10 @@ def test_create_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_cluster(request) + client.delete_network_peering(request) -def test_create_cluster_rest_flattened(): +def test_delete_network_peering_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15245,14 +37202,12 @@ def test_create_cluster_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - cluster=vmwareengine_resources.Cluster(name="name_value"), - cluster_id="cluster_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -15263,20 +37218,20 @@ def test_create_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_cluster(**mock_args) + client.delete_network_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/clusters" + "%s/v1/{name=projects/*/locations/*/networkPeerings/*}" % client.transport._host, args[1], ) -def test_create_cluster_rest_flattened_error(transport: str = "rest"): +def test_delete_network_peering_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15285,15 +37240,13 @@ def test_create_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_cluster( - vmwareengine.CreateClusterRequest(), - parent="parent_value", - cluster=vmwareengine_resources.Cluster(name="name_value"), - cluster_id="cluster_id_value", + client.delete_network_peering( + vmwareengine.DeleteNetworkPeeringRequest(), + name="name_value", ) -def test_create_cluster_rest_error(): +def test_delete_network_peering_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15302,11 +37255,11 @@ def test_create_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateClusterRequest, + vmwareengine.UpdateNetworkPeeringRequest, dict, ], ) -def test_update_cluster_rest(request_type): +def test_update_network_peering_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15314,25 +37267,34 @@ def test_update_cluster_rest(request_type): # send a request that will satisfy transcoding request_init = { - "cluster": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + "network_peering": { + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } } - request_init["cluster"] = { - "name": 
"projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4", + request_init["network_peering"] = { + "name": "projects/sample1/locations/sample2/networkPeerings/sample3", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "peer_network": "peer_network_value", + "export_custom_routes": True, + "import_custom_routes": True, + "exchange_subnet_routes": True, + "export_custom_routes_with_public_ip": True, + "import_custom_routes_with_public_ip": True, "state": 1, - "management": True, + "state_details": "state_details_value", + "peer_mtu": 865, + "peer_network_type": 1, "uid": "uid_value", - "node_type_configs": {}, + "vmware_engine_network": "vmware_engine_network_value", + "description": "description_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.UpdateClusterRequest.meta.fields["cluster"] + test_field = vmwareengine.UpdateNetworkPeeringRequest.meta.fields["network_peering"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -15360,7 +37322,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cluster"].items(): # pragma: NO COVER + for field, value in request_init["network_peering"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -15390,10 +37352,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["cluster"][field])): - del 
request_init["cluster"][field][i][subfield] + for i in range(0, len(request_init["network_peering"][field])): + del request_init["network_peering"][field][i][subfield] else: - del request_init["cluster"][field][subfield] + del request_init["network_peering"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -15408,14 +37370,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.update_network_peering(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_cluster_rest_required_fields( - request_type=vmwareengine.UpdateClusterRequest, +def test_update_network_peering_rest_required_fields( + request_type=vmwareengine.UpdateNetworkPeeringRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -15434,20 +37396,19 @@ def test_update_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).update_network_peering._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_cluster._get_unset_required_fields(jsonified_request) + ).update_network_peering._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( "request_id", "update_mask", - "validate_only", ) ) jsonified_request.update(unset_fields) @@ -15486,38 +37447,37 @@ def test_update_cluster_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_cluster(request) + response = client.update_network_peering(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_cluster_rest_unset_required_fields(): +def test_update_network_peering_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_cluster._get_unset_required_fields({}) + unset_fields = transport.update_network_peering._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "requestId", "updateMask", - "validateOnly", ) ) & set( ( + "networkPeering", "updateMask", - "cluster", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): +def test_update_network_peering_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15532,14 +37492,14 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_update_cluster" + transports.VmwareEngineRestInterceptor, "post_update_network_peering" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_update_cluster" + transports.VmwareEngineRestInterceptor, "pre_update_network_peering" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UpdateClusterRequest.pb( - 
vmwareengine.UpdateClusterRequest() + pb_message = vmwareengine.UpdateNetworkPeeringRequest.pb( + vmwareengine.UpdateNetworkPeeringRequest() ) transcode.return_value = { "method": "post", @@ -15555,7 +37515,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.UpdateClusterRequest() + request = vmwareengine.UpdateNetworkPeeringRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -15563,7 +37523,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_cluster( + client.update_network_peering( request, metadata=[ ("key", "val"), @@ -15575,8 +37535,8 @@ def test_update_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_cluster_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.UpdateClusterRequest +def test_update_network_peering_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateNetworkPeeringRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15585,8 +37545,8 @@ def test_update_cluster_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "cluster": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + "network_peering": { + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } } request = request_type(**request_init) @@ -15600,10 +37560,10 @@ def test_update_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_cluster(request) + client.update_network_peering(request) -def test_update_cluster_rest_flattened(): +def test_update_network_peering_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15616,14 +37576,14 @@ def 
test_update_cluster_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "cluster": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + "network_peering": { + "name": "projects/sample1/locations/sample2/networkPeerings/sample3" } } # get truthy value for each flattened field mock_args = dict( - cluster=vmwareengine_resources.Cluster(name="name_value"), + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -15635,20 +37595,20 @@ def test_update_cluster_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_cluster(**mock_args) + client.update_network_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{cluster.name=projects/*/locations/*/privateClouds/*/clusters/*}" + "%s/v1/{network_peering.name=projects/*/locations/*/networkPeerings/*}" % client.transport._host, args[1], ) -def test_update_cluster_rest_flattened_error(transport: str = "rest"): +def test_update_network_peering_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15657,14 +37617,14 @@ def test_update_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_cluster( - vmwareengine.UpdateClusterRequest(), - cluster=vmwareengine_resources.Cluster(name="name_value"), + client.update_network_peering( + vmwareengine.UpdateNetworkPeeringRequest(), + network_peering=vmwareengine_resources.NetworkPeering(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_cluster_rest_error(): +def test_update_network_peering_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15673,11 +37633,11 @@ def test_update_cluster_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeleteClusterRequest, + vmwareengine.ListPeeringRoutesRequest, dict, ], ) -def test_delete_cluster_rest(request_type): +def test_list_peering_routes_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15685,35 +37645,40 @@ def test_delete_cluster_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + "parent": "projects/sample1/locations/sample2/networkPeerings/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListPeeringRoutesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListPeeringRoutesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.list_peering_routes(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListPeeringRoutesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_cluster_rest_required_fields( - request_type=vmwareengine.DeleteClusterRequest, +def test_list_peering_routes_rest_required_fields( + request_type=vmwareengine.ListPeeringRoutesRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15728,23 +37693,29 @@ def test_delete_cluster_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + ).list_peering_routes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_cluster._get_unset_required_fields(jsonified_request) + 
).list_peering_routes._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15753,7 +37724,7 @@ def test_delete_cluster_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListPeeringRoutesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15765,36 +37736,48 @@ def test_delete_cluster_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListPeeringRoutesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_cluster(request) + response = client.list_peering_routes(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_cluster_rest_unset_required_fields(): +def test_list_peering_routes_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.list_peering_routes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): +def test_list_peering_routes_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15807,16 +37790,14 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_delete_cluster" + transports.VmwareEngineRestInterceptor, "post_list_peering_routes" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_delete_cluster" + transports.VmwareEngineRestInterceptor, "pre_list_peering_routes" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.DeleteClusterRequest.pb( - vmwareengine.DeleteClusterRequest() + pb_message = vmwareengine.ListPeeringRoutesRequest.pb( + vmwareengine.ListPeeringRoutesRequest() ) transcode.return_value = { "method": "post", @@ -15828,19 +37809,19 @@ def test_delete_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = vmwareengine.ListPeeringRoutesResponse.to_json( + vmwareengine.ListPeeringRoutesResponse() ) - request = vmwareengine.DeleteClusterRequest() + request = vmwareengine.ListPeeringRoutesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine.ListPeeringRoutesResponse() - client.delete_cluster( + client.list_peering_routes( request, metadata=[ ("key", "val"), @@ -15852,8 +37833,8 @@ def test_delete_cluster_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_cluster_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.DeleteClusterRequest +def test_list_peering_routes_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListPeeringRoutesRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15862,7 +37843,7 @@ def test_delete_cluster_rest_bad_request( # send a request that will 
satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + "parent": "projects/sample1/locations/sample2/networkPeerings/sample3" } request = request_type(**request_init) @@ -15875,10 +37856,10 @@ def test_delete_cluster_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_cluster(request) + client.list_peering_routes(request) -def test_delete_cluster_rest_flattened(): +def test_list_peering_routes_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15887,40 +37868,42 @@ def test_delete_cluster_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListPeeringRoutesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/clusters/sample4" + "parent": "projects/sample1/locations/sample2/networkPeerings/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListPeeringRoutesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_cluster(**mock_args) + client.list_peering_routes(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*/clusters/*}" + "%s/v1/{parent=projects/*/locations/*/networkPeerings/*}/peeringRoutes" % client.transport._host, args[1], ) -def test_delete_cluster_rest_flattened_error(transport: str = "rest"): +def test_list_peering_routes_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15929,69 +37912,198 @@ def test_delete_cluster_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_cluster( - vmwareengine.DeleteClusterRequest(), - name="name_value", + client.list_peering_routes( + vmwareengine.ListPeeringRoutesRequest(), + parent="parent_value", + ) + + +def test_list_peering_routes_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="abc", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[], + next_page_token="def", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + ], + next_page_token="ghi", + ), + vmwareengine.ListPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + ], + ), ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListPeeringRoutesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/networkPeerings/sample3" + } + + pager = client.list_peering_routes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.PeeringRoute) for i in results) + + pages = list(client.list_peering_routes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vmwareengine.CreateHcxActivationKeyRequest, + dict, + ], +) +def test_create_hcx_activation_key_rest(request_type): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + 
request_init = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + request_init["hcx_activation_key"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "activation_key": "activation_key_value", + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateHcxActivationKeyRequest.meta.fields[ + "hcx_activation_key" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_delete_cluster_rest_error(): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["hcx_activation_key"].items(): # pragma: NO COVER + result = None + is_repeated = 
False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - vmwareengine.ListSubnetsRequest, - dict, - ], -) -def test_list_subnets_rest(request_type): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["hcx_activation_key"][field])): + del request_init["hcx_activation_key"][field][i][subfield] + else: + del request_init["hcx_activation_key"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListSubnetsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListSubnetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_subnets(request) + response = client.create_hcx_activation_key(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubnetsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_subnets_rest_required_fields( - request_type=vmwareengine.ListSubnetsRequest, +def test_create_hcx_activation_key_rest_required_fields( + request_type=vmwareengine.CreateHcxActivationKeyRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} request_init["parent"] = "" + request_init["hcx_activation_key_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16003,24 +38115,30 @@ def test_list_subnets_rest_required_fields( ) # verify fields with default values are dropped + assert "hcxActivationKeyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_subnets._get_unset_required_fields(jsonified_request) + ).create_hcx_activation_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "hcxActivationKeyId" in jsonified_request + assert ( + 
jsonified_request["hcxActivationKeyId"] == request_init["hcx_activation_key_id"] + ) jsonified_request["parent"] = "parent_value" + jsonified_request["hcxActivationKeyId"] = "hcx_activation_key_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_subnets._get_unset_required_fields(jsonified_request) + ).create_hcx_activation_key._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", + "hcx_activation_key_id", + "request_id", ) ) jsonified_request.update(unset_fields) @@ -16028,6 +38146,8 @@ def test_list_subnets_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "hcxActivationKeyId" in jsonified_request + assert jsonified_request["hcxActivationKeyId"] == "hcx_activation_key_id_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16036,7 +38156,7 @@ def test_list_subnets_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListSubnetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16048,47 +38168,57 @@ def test_list_subnets_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine.ListSubnetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_subnets(request) + response = client.create_hcx_activation_key(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "hcxActivationKeyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_subnets_rest_unset_required_fields(): +def test_create_hcx_activation_key_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_subnets._get_unset_required_fields({}) + unset_fields = transport.create_hcx_activation_key._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", + "hcxActivationKeyId", + "requestId", + ) + ) + & set( + ( + "parent", + "hcxActivationKey", + "hcxActivationKeyId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_subnets_rest_interceptors(null_interceptor): +def test_create_hcx_activation_key_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16101,14 
+38231,16 @@ def test_list_subnets_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_subnets" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_hcx_activation_key" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_subnets" + transports.VmwareEngineRestInterceptor, "pre_create_hcx_activation_key" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListSubnetsRequest.pb( - vmwareengine.ListSubnetsRequest() + pb_message = vmwareengine.CreateHcxActivationKeyRequest.pb( + vmwareengine.CreateHcxActivationKeyRequest() ) transcode.return_value = { "method": "post", @@ -16120,19 +38252,19 @@ def test_list_subnets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListSubnetsResponse.to_json( - vmwareengine.ListSubnetsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.ListSubnetsRequest() + request = vmwareengine.CreateHcxActivationKeyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListSubnetsResponse() + post.return_value = operations_pb2.Operation() - client.list_subnets( + client.create_hcx_activation_key( request, metadata=[ ("key", "val"), @@ -16144,8 +38276,8 @@ def test_list_subnets_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_subnets_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListSubnetsRequest +def test_create_hcx_activation_key_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateHcxActivationKeyRequest ): 
client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16167,10 +38299,10 @@ def test_list_subnets_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_subnets(request) + client.create_hcx_activation_key(request) -def test_list_subnets_rest_flattened(): +def test_create_hcx_activation_key_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16179,7 +38311,7 @@ def test_list_subnets_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListSubnetsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -16189,32 +38321,34 @@ def test_list_subnets_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), + hcx_activation_key_id="hcx_activation_key_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListSubnetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_subnets(**mock_args) + client.create_hcx_activation_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/subnets" + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys" % client.transport._host, args[1], ) -def test_list_subnets_rest_flattened_error(transport: str = "rest"): +def test_create_hcx_activation_key_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16223,83 +38357,30 @@ def test_list_subnets_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_subnets( - vmwareengine.ListSubnetsRequest(), + client.create_hcx_activation_key( + vmwareengine.CreateHcxActivationKeyRequest(), parent="parent_value", + hcx_activation_key=vmwareengine_resources.HcxActivationKey( + name="name_value" + ), + hcx_activation_key_id="hcx_activation_key_id_value", ) -def test_list_subnets_rest_pager(transport: str = "rest"): +def test_create_hcx_activation_key_rest_error(): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - ], - next_page_token="abc", - ), - vmwareengine.ListSubnetsResponse( - subnets=[], - next_page_token="def", - ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - ], - next_page_token="ghi", - ), - vmwareengine.ListSubnetsResponse( - subnets=[ - vmwareengine_resources.Subnet(), - vmwareengine_resources.Subnet(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(vmwareengine.ListSubnetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } - - pager = client.list_subnets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.Subnet) for i in results) - - pages = list(client.list_subnets(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetSubnetRequest, + vmwareengine.ListHcxActivationKeysRequest, dict, ], ) -def test_get_subnet_rest(request_type): +def test_list_hcx_activation_keys_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16307,46 +38388,42 @@ def test_get_subnet_rest(request_type): # send a request that will satisfy transcoding request_init = { - 
"name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Subnet( - name="name_value", - ip_cidr_range="ip_cidr_range_value", - gateway_ip="gateway_ip_value", - type_="type__value", - state=vmwareengine_resources.Subnet.State.ACTIVE, + return_value = vmwareengine.ListHcxActivationKeysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.Subnet.pb(return_value) + return_value = vmwareengine.ListHcxActivationKeysResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_subnet(request) + response = client.list_hcx_activation_keys(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Subnet) - assert response.name == "name_value" - assert response.ip_cidr_range == "ip_cidr_range_value" - assert response.gateway_ip == "gateway_ip_value" - assert response.type_ == "type__value" - assert response.state == vmwareengine_resources.Subnet.State.ACTIVE + assert isinstance(response, pagers.ListHcxActivationKeysPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_subnet_rest_required_fields(request_type=vmwareengine.GetSubnetRequest): +def test_list_hcx_activation_keys_rest_required_fields( + request_type=vmwareengine.ListHcxActivationKeysRequest, +): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16361,21 +38438,28 @@ def test_get_subnet_rest_required_fields(request_type=vmwareengine.GetSubnetRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_subnet._get_unset_required_fields(jsonified_request) + ).list_hcx_activation_keys._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_subnet._get_unset_required_fields(jsonified_request) + ).list_hcx_activation_keys._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16384,7 +38468,7 @@ def test_get_subnet_rest_required_fields(request_type=vmwareengine.GetSubnetRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Subnet() + return_value = vmwareengine.ListHcxActivationKeysResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16405,30 +38489,38 @@ def test_get_subnet_rest_required_fields(request_type=vmwareengine.GetSubnetRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.Subnet.pb(return_value) + return_value = vmwareengine.ListHcxActivationKeysResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_subnet(request) + response = client.list_hcx_activation_keys(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_subnet_rest_unset_required_fields(): +def test_list_hcx_activation_keys_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_subnet._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + 
unset_fields = transport.list_hcx_activation_keys._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_subnet_rest_interceptors(null_interceptor): +def test_list_hcx_activation_keys_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16441,13 +38533,15 @@ def test_get_subnet_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_subnet" + transports.VmwareEngineRestInterceptor, "post_list_hcx_activation_keys" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_subnet" + transports.VmwareEngineRestInterceptor, "pre_list_hcx_activation_keys" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetSubnetRequest.pb(vmwareengine.GetSubnetRequest()) + pb_message = vmwareengine.ListHcxActivationKeysRequest.pb( + vmwareengine.ListHcxActivationKeysRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16458,19 +38552,19 @@ def test_get_subnet_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.Subnet.to_json( - vmwareengine_resources.Subnet() + req.return_value._content = vmwareengine.ListHcxActivationKeysResponse.to_json( + vmwareengine.ListHcxActivationKeysResponse() ) - request = vmwareengine.GetSubnetRequest() + request = vmwareengine.ListHcxActivationKeysRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.Subnet() + post.return_value = 
vmwareengine.ListHcxActivationKeysResponse() - client.get_subnet( + client.list_hcx_activation_keys( request, metadata=[ ("key", "val"), @@ -16482,8 +38576,8 @@ def test_get_subnet_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_subnet_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetSubnetRequest +def test_list_hcx_activation_keys_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListHcxActivationKeysRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16492,7 +38586,7 @@ def test_get_subnet_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } request = request_type(**request_init) @@ -16505,10 +38599,10 @@ def test_get_subnet_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_subnet(request) + client.list_hcx_activation_keys(request) -def test_get_subnet_rest_flattened(): +def test_list_hcx_activation_keys_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16517,16 +38611,16 @@ def test_get_subnet_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.Subnet() + return_value = vmwareengine.ListHcxActivationKeysResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -16534,25 +38628,25 @@ def test_get_subnet_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.Subnet.pb(return_value) + return_value = vmwareengine.ListHcxActivationKeysResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_subnet(**mock_args) + client.list_hcx_activation_keys(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*/subnets/*}" + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys" % client.transport._host, args[1], ) -def test_get_subnet_rest_flattened_error(transport: str = "rest"): +def test_list_hcx_activation_keys_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16561,26 +38655,87 @@ def test_get_subnet_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_subnet( - vmwareengine.GetSubnetRequest(), - name="name_value", + client.list_hcx_activation_keys( + vmwareengine.ListHcxActivationKeysRequest(), + parent="parent_value", ) -def test_get_subnet_rest_error(): +def test_list_hcx_activation_keys_rest_pager(transport: str = "rest"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="abc", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[], + next_page_token="def", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + ], + next_page_token="ghi", + ), + vmwareengine.ListHcxActivationKeysResponse( + hcx_activation_keys=[ + vmwareengine_resources.HcxActivationKey(), + vmwareengine_resources.HcxActivationKey(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListHcxActivationKeysResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": 
"projects/sample1/locations/sample2/privateClouds/sample3" + } + + pager = client.list_hcx_activation_keys(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.HcxActivationKey) for i in results + ) + + pages = list(client.list_hcx_activation_keys(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateSubnetRequest, + vmwareengine.GetHcxActivationKeyRequest, dict, ], ) -def test_update_subnet_rest(request_type): +def test_get_hcx_activation_key_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16588,110 +38743,46 @@ def test_update_subnet_rest(request_type): # send a request that will satisfy transcoding request_init = { - "subnet": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" - } - } - request_init["subnet"] = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4", - "ip_cidr_range": "ip_cidr_range_value", - "gateway_ip": "gateway_ip_value", - "type_": "type__value", - "state": 1, + "name": "projects/sample1/locations/sample2/privateClouds/sample3/hcxActivationKeys/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.UpdateSubnetRequest.meta.fields["subnet"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["subnet"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["subnet"][field])): - del request_init["subnet"][field][i][subfield] - else: - del 
request_init["subnet"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.HcxActivationKey( + name="name_value", + state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, + activation_key="activation_key_value", + uid="uid_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_subnet(request) + response = client.get_hcx_activation_key(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, vmwareengine_resources.HcxActivationKey) + assert response.name == "name_value" + assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE + assert response.activation_key == "activation_key_value" + assert response.uid == "uid_value" -def test_update_subnet_rest_required_fields( - request_type=vmwareengine.UpdateSubnetRequest, +def test_get_hcx_activation_key_rest_required_fields( + request_type=vmwareengine.GetHcxActivationKeyRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16706,19 +38797,21 @@ def test_update_subnet_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_subnet._get_unset_required_fields(jsonified_request) + ).get_hcx_activation_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_subnet._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_hcx_activation_key._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16727,7 +38820,7 @@ def test_update_subnet_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.HcxActivationKey() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16739,45 +38832,39 @@ def test_update_subnet_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_subnet(request) + response = client.get_hcx_activation_key(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_subnet_rest_unset_required_fields(): +def test_get_hcx_activation_key_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_subnet._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "subnet", - ) - ) - ) + unset_fields = transport.get_hcx_activation_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_subnet_rest_interceptors(null_interceptor): +def test_get_hcx_activation_key_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16790,16 +38877,14 @@ def test_update_subnet_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_update_subnet" + transports.VmwareEngineRestInterceptor, "post_get_hcx_activation_key" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_update_subnet" + transports.VmwareEngineRestInterceptor, "pre_get_hcx_activation_key" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UpdateSubnetRequest.pb( - vmwareengine.UpdateSubnetRequest() + pb_message = vmwareengine.GetHcxActivationKeyRequest.pb( + vmwareengine.GetHcxActivationKeyRequest() ) transcode.return_value = { "method": "post", @@ -16811,19 +38896,19 @@ def test_update_subnet_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = vmwareengine_resources.HcxActivationKey.to_json( + vmwareengine_resources.HcxActivationKey() ) - request = vmwareengine.UpdateSubnetRequest() + request = vmwareengine.GetHcxActivationKeyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine_resources.HcxActivationKey() - client.update_subnet( + client.get_hcx_activation_key( request, metadata=[ ("key", "val"), @@ -16835,8 +38920,8 @@ def test_update_subnet_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_subnet_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.UpdateSubnetRequest +def test_get_hcx_activation_key_rest_bad_request( 
+ transport: str = "rest", request_type=vmwareengine.GetHcxActivationKeyRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16845,9 +38930,7 @@ def test_update_subnet_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "subnet": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" - } + "name": "projects/sample1/locations/sample2/privateClouds/sample3/hcxActivationKeys/sample4" } request = request_type(**request_init) @@ -16860,10 +38943,10 @@ def test_update_subnet_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_subnet(request) + client.get_hcx_activation_key(request) -def test_update_subnet_rest_flattened(): +def test_get_hcx_activation_key_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16872,43 +38955,42 @@ def test_update_subnet_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.HcxActivationKey() # get arguments that satisfy an http rule for this method sample_request = { - "subnet": { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/subnets/sample4" - } + "name": "projects/sample1/locations/sample2/privateClouds/sample3/hcxActivationKeys/sample4" } # get truthy value for each flattened field mock_args = dict( - subnet=vmwareengine_resources.Subnet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_subnet(**mock_args) + client.get_hcx_activation_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{subnet.name=projects/*/locations/*/privateClouds/*/subnets/*}" + "%s/v1/{name=projects/*/locations/*/privateClouds/*/hcxActivationKeys/*}" % client.transport._host, args[1], ) -def test_update_subnet_rest_flattened_error(transport: str = "rest"): +def test_get_hcx_activation_key_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16917,14 +38999,13 @@ def test_update_subnet_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_subnet( - vmwareengine.UpdateSubnetRequest(), - subnet=vmwareengine_resources.Subnet(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_hcx_activation_key( + vmwareengine.GetHcxActivationKeyRequest(), + name="name_value", ) -def test_update_subnet_rest_error(): +def test_get_hcx_activation_key_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -16933,52 +39014,65 @@ def test_update_subnet_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListNodeTypesRequest, + vmwareengine.GetNetworkPolicyRequest, dict, ], ) -def test_list_node_types_rest(request_type): +def test_get_network_policy_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListNodeTypesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = vmwareengine_resources.NetworkPolicy( + name="name_value", + edge_services_cidr="edge_services_cidr_value", + uid="uid_value", + vmware_engine_network="vmware_engine_network_value", + description="description_value", + vmware_engine_network_canonical="vmware_engine_network_canonical_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListNodeTypesResponse.pb(return_value) + return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_node_types(request) + response = client.get_network_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNodeTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, vmwareengine_resources.NetworkPolicy) + assert response.name == "name_value" + assert response.edge_services_cidr == "edge_services_cidr_value" + assert response.uid == "uid_value" + assert response.vmware_engine_network == "vmware_engine_network_value" + assert response.description == "description_value" + assert ( + response.vmware_engine_network_canonical + == "vmware_engine_network_canonical_value" + ) -def test_list_node_types_rest_required_fields( - request_type=vmwareengine.ListNodeTypesRequest, +def test_get_network_policy_rest_required_fields( + request_type=vmwareengine.GetNetworkPolicyRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16993,29 +39087,21 @@ def test_list_node_types_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_node_types._get_unset_required_fields(jsonified_request) + ).get_network_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_node_types._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).get_network_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17024,7 +39110,7 @@ def test_list_node_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListNodeTypesResponse() + return_value = vmwareengine_resources.NetworkPolicy() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17045,39 +39131,30 @@ def test_list_node_types_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListNodeTypesResponse.pb(return_value) + return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_node_types(request) + response = client.get_network_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_node_types_rest_unset_required_fields(): +def test_get_network_policy_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_node_types._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - 
"filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_network_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_node_types_rest_interceptors(null_interceptor): +def test_get_network_policy_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17090,14 +39167,14 @@ def test_list_node_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_node_types" + transports.VmwareEngineRestInterceptor, "post_get_network_policy" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_node_types" + transports.VmwareEngineRestInterceptor, "pre_get_network_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListNodeTypesRequest.pb( - vmwareengine.ListNodeTypesRequest() + pb_message = vmwareengine.GetNetworkPolicyRequest.pb( + vmwareengine.GetNetworkPolicyRequest() ) transcode.return_value = { "method": "post", @@ -17109,19 +39186,19 @@ def test_list_node_types_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListNodeTypesResponse.to_json( - vmwareengine.ListNodeTypesResponse() + req.return_value._content = vmwareengine_resources.NetworkPolicy.to_json( + vmwareengine_resources.NetworkPolicy() ) - request = vmwareengine.ListNodeTypesRequest() + request = vmwareengine.GetNetworkPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListNodeTypesResponse() + post.return_value = 
vmwareengine_resources.NetworkPolicy() - client.list_node_types( + client.get_network_policy( request, metadata=[ ("key", "val"), @@ -17133,8 +39210,8 @@ def test_list_node_types_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_node_types_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListNodeTypesRequest +def test_get_network_policy_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetNetworkPolicyRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17142,7 +39219,9 @@ def test_list_node_types_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17154,10 +39233,10 @@ def test_list_node_types_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_node_types(request) + client.get_network_policy(request) -def test_list_node_types_rest_flattened(): +def test_get_network_policy_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17166,14 +39245,16 @@ def test_list_node_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListNodeTypesResponse() + return_value = vmwareengine_resources.NetworkPolicy() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -17181,162 +39262,94 @@ def test_list_node_types_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListNodeTypesResponse.pb(return_value) + return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_node_types(**mock_args) + client.get_network_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/nodeTypes" % client.transport._host, - args[1], - ) - - -def test_list_node_types_rest_flattened_error(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_node_types( - vmwareengine.ListNodeTypesRequest(), - parent="parent_value", + "%s/v1/{name=projects/*/locations/*/networkPolicies/*}" + % client.transport._host, + args[1], ) -def test_list_node_types_rest_pager(transport: str = "rest"): +def test_get_network_policy_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - ], - next_page_token="abc", - ), - vmwareengine.ListNodeTypesResponse( - node_types=[], - next_page_token="def", - ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - ], - next_page_token="ghi", - ), - vmwareengine.ListNodeTypesResponse( - node_types=[ - vmwareengine_resources.NodeType(), - vmwareengine_resources.NodeType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - vmwareengine.ListNodeTypesResponse.to_json(x) for x in response + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_network_policy( + vmwareengine.GetNetworkPolicyRequest(), + name="name_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_node_types(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.NodeType) for i in results) - pages = list(client.list_node_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_network_policy_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetNodeTypeRequest, + vmwareengine.ListNetworkPoliciesRequest, dict, ], ) -def test_get_node_type_rest(request_type): +def test_list_network_policies_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/nodeTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.NodeType( - name="name_value", - node_type_id="node_type_id_value", - display_name="display_name_value", - virtual_cpu_count=1846, - total_core_count=1716, - memory_gb=961, - disk_size_gb=1261, - available_custom_core_counts=[2974], + return_value = vmwareengine.ListNetworkPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.NodeType.pb(return_value) + return_value = vmwareengine.ListNetworkPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_node_type(request) + response = client.list_network_policies(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.NodeType) - assert response.name == "name_value" - assert response.node_type_id == "node_type_id_value" - assert response.display_name == "display_name_value" - assert response.virtual_cpu_count == 1846 - assert response.total_core_count == 1716 - assert response.memory_gb == 961 - assert response.disk_size_gb == 1261 - assert response.available_custom_core_counts == [2974] + assert isinstance(response, pagers.ListNetworkPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_node_type_rest_required_fields( - request_type=vmwareengine.GetNodeTypeRequest, +def test_list_network_policies_rest_required_fields( + request_type=vmwareengine.ListNetworkPoliciesRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17351,21 +39364,30 @@ def test_get_node_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_node_type._get_unset_required_fields(jsonified_request) + ).list_network_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_node_type._get_unset_required_fields(jsonified_request) + ).list_network_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17374,7 +39396,7 @@ def test_get_node_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.NodeType() + return_value = vmwareengine.ListNetworkPoliciesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17395,30 +39417,40 @@ def test_get_node_type_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.NodeType.pb(return_value) + return_value = vmwareengine.ListNetworkPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_node_type(request) + response = client.list_network_policies(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_node_type_rest_unset_required_fields(): +def test_list_network_policies_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_node_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_network_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_node_type_rest_interceptors(null_interceptor): +def test_list_network_policies_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17431,14 +39463,14 @@ def test_get_node_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_node_type" + transports.VmwareEngineRestInterceptor, "post_list_network_policies" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_node_type" + transports.VmwareEngineRestInterceptor, "pre_list_network_policies" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetNodeTypeRequest.pb( - vmwareengine.GetNodeTypeRequest() + pb_message = vmwareengine.ListNetworkPoliciesRequest.pb( + vmwareengine.ListNetworkPoliciesRequest() ) transcode.return_value = { "method": "post", @@ -17450,19 +39482,19 @@ def test_get_node_type_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.NodeType.to_json( - vmwareengine_resources.NodeType() + req.return_value._content = vmwareengine.ListNetworkPoliciesResponse.to_json( + vmwareengine.ListNetworkPoliciesResponse() ) - request = vmwareengine.GetNodeTypeRequest() + request = vmwareengine.ListNetworkPoliciesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.NodeType() + post.return_value = 
vmwareengine.ListNetworkPoliciesResponse() - client.get_node_type( + client.list_network_policies( request, metadata=[ ("key", "val"), @@ -17474,8 +39506,8 @@ def test_get_node_type_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_node_type_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetNodeTypeRequest +def test_list_network_policies_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListNetworkPoliciesRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17483,7 +39515,7 @@ def test_get_node_type_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/nodeTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17495,10 +39527,10 @@ def test_get_node_type_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_node_type(request) + client.list_network_policies(request) -def test_get_node_type_rest_flattened(): +def test_list_network_policies_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17507,16 +39539,14 @@ def test_get_node_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.NodeType() + return_value = vmwareengine.ListNetworkPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/nodeTypes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -17524,95 +39554,224 @@ def test_get_node_type_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.NodeType.pb(return_value) + return_value = vmwareengine.ListNetworkPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_node_type(**mock_args) + client.list_network_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/nodeTypes/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/networkPolicies" + % client.transport._host, args[1], ) -def test_get_node_type_rest_flattened_error(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_list_network_policies_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_network_policies( + vmwareengine.ListNetworkPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_network_policies_rest_pager(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="abc", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[], + next_page_token="def", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + ], + next_page_token="ghi", + ), + vmwareengine.ListNetworkPoliciesResponse( + network_policies=[ + vmwareengine_resources.NetworkPolicy(), + vmwareengine_resources.NetworkPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListNetworkPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_node_type( - vmwareengine.GetNodeTypeRequest(), - name="name_value", - ) + pager = client.list_network_policies(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vmwareengine_resources.NetworkPolicy) for i in results) -def test_get_node_type_rest_error(): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_network_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - vmwareengine.ShowNsxCredentialsRequest, + vmwareengine.CreateNetworkPolicyRequest, dict, ], ) -def test_show_nsx_credentials_rest(request_type): +def test_create_network_policy_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["network_policy"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "internet_access": {"enabled": True, "state": 1}, + "external_ip": {}, + "edge_services_cidr": "edge_services_cidr_value", + "uid": "uid_value", + "vmware_engine_network": "vmware_engine_network_value", + "description": "description_value", + "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateNetworkPolicyRequest.meta.fields["network_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_policy"][field])): + del request_init["network_policy"][field][i][subfield] + else: + del request_init["network_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Credentials( - username="username_value", - password="password_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.show_nsx_credentials(request) + response = client.create_network_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Credentials) - assert response.username == "username_value" - assert response.password == "password_value" + assert response.operation.name == "operations/spam" -def test_show_nsx_credentials_rest_required_fields( - request_type=vmwareengine.ShowNsxCredentialsRequest, +def test_create_network_policy_rest_required_fields( + request_type=vmwareengine.CreateNetworkPolicyRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["private_cloud"] = "" + request_init["parent"] = "" + request_init["network_policy_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17624,24 +39783,37 @@ def test_show_nsx_credentials_rest_required_fields( ) # verify fields with default values are dropped + assert "networkPolicyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).show_nsx_credentials._get_unset_required_fields(jsonified_request) + ).create_network_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "networkPolicyId" in jsonified_request + assert jsonified_request["networkPolicyId"] == request_init["network_policy_id"] - jsonified_request["privateCloud"] = "private_cloud_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["networkPolicyId"] = "network_policy_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).show_nsx_credentials._get_unset_required_fields(jsonified_request) + ).create_network_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "network_policy_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "privateCloud" in jsonified_request - assert jsonified_request["privateCloud"] == "private_cloud_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "networkPolicyId" in jsonified_request + assert jsonified_request["networkPolicyId"] == "network_policy_id_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17650,7 +39822,7 @@ def test_show_nsx_credentials_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Credentials() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17662,39 +39834,57 @@ def test_show_nsx_credentials_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.show_nsx_credentials(request) + response = client.create_network_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "networkPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = 
req.call_args.kwargs["params"] assert expected_params == actual_params -def test_show_nsx_credentials_rest_unset_required_fields(): +def test_create_network_policy_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.show_nsx_credentials._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("privateCloud",))) + unset_fields = transport.create_network_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "networkPolicyId", + "requestId", + ) + ) + & set( + ( + "parent", + "networkPolicyId", + "networkPolicy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_show_nsx_credentials_rest_interceptors(null_interceptor): +def test_create_network_policy_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17707,14 +39897,16 @@ def test_show_nsx_credentials_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_show_nsx_credentials" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_create_network_policy" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_show_nsx_credentials" + transports.VmwareEngineRestInterceptor, "pre_create_network_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ShowNsxCredentialsRequest.pb( - vmwareengine.ShowNsxCredentialsRequest() + pb_message = vmwareengine.CreateNetworkPolicyRequest.pb( + vmwareengine.CreateNetworkPolicyRequest() ) transcode.return_value = { "method": "post", @@ -17726,19 +39918,19 @@ def test_show_nsx_credentials_rest_interceptors(null_interceptor): req.return_value = Response() 
req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.Credentials.to_json( - vmwareengine_resources.Credentials() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.ShowNsxCredentialsRequest() + request = vmwareengine.CreateNetworkPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.Credentials() + post.return_value = operations_pb2.Operation() - client.show_nsx_credentials( + client.create_network_policy( request, metadata=[ ("key", "val"), @@ -17750,8 +39942,8 @@ def test_show_nsx_credentials_rest_interceptors(null_interceptor): post.assert_called_once() -def test_show_nsx_credentials_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ShowNsxCredentialsRequest +def test_create_network_policy_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateNetworkPolicyRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17759,9 +39951,7 @@ def test_show_nsx_credentials_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -17773,10 +39963,10 @@ def test_show_nsx_credentials_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.show_nsx_credentials(request) + client.create_network_policy(request) -def test_show_nsx_credentials_rest_flattened(): +def test_create_network_policy_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17785,42 +39975,40 @@ def test_show_nsx_credentials_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Credentials() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - private_cloud="private_cloud_value", + parent="parent_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + network_policy_id="network_policy_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.show_nsx_credentials(**mock_args) + client.create_network_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showNsxCredentials" + "%s/v1/{parent=projects/*/locations/*}/networkPolicies" % client.transport._host, args[1], ) -def test_show_nsx_credentials_rest_flattened_error(transport: str = "rest"): +def test_create_network_policy_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17829,13 +40017,15 @@ def test_show_nsx_credentials_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.show_nsx_credentials( - vmwareengine.ShowNsxCredentialsRequest(), - private_cloud="private_cloud_value", + client.create_network_policy( + vmwareengine.CreateNetworkPolicyRequest(), + parent="parent_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + network_policy_id="network_policy_id_value", ) -def test_show_nsx_credentials_rest_error(): +def test_create_network_policy_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -17844,11 +40034,11 @@ def test_show_nsx_credentials_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ShowVcenterCredentialsRequest, + vmwareengine.UpdateNetworkPolicyRequest, dict, ], ) -def test_show_vcenter_credentials_rest(request_type): +def test_update_network_policy_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17856,42 +40046,115 @@ def test_show_vcenter_credentials_rest(request_type): # send a request that will satisfy transcoding request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "network_policy": { + "name": 
"projects/sample1/locations/sample2/networkPolicies/sample3" + } + } + request_init["network_policy"] = { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "internet_access": {"enabled": True, "state": 1}, + "external_ip": {}, + "edge_services_cidr": "edge_services_cidr_value", + "uid": "uid_value", + "vmware_engine_network": "vmware_engine_network_value", + "description": "description_value", + "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateNetworkPolicyRequest.meta.fields["network_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["network_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["network_policy"][field])): + del request_init["network_policy"][field][i][subfield] + else: + 
del request_init["network_policy"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.Credentials( - username="username_value", - password="password_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.show_vcenter_credentials(request) + response = client.update_network_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.Credentials) - assert response.username == "username_value" - assert response.password == "password_value" + assert response.operation.name == "operations/spam" -def test_show_vcenter_credentials_rest_required_fields( - request_type=vmwareengine.ShowVcenterCredentialsRequest, +def test_update_network_policy_rest_required_fields( + request_type=vmwareengine.UpdateNetworkPolicyRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["private_cloud"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17906,21 +40169,24 @@ def test_show_vcenter_credentials_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).show_vcenter_credentials._get_unset_required_fields(jsonified_request) + ).update_network_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["privateCloud"] = "private_cloud_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).show_vcenter_credentials._get_unset_required_fields(jsonified_request) + ).update_network_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "privateCloud" in jsonified_request - assert jsonified_request["privateCloud"] == "private_cloud_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17929,7 +40195,7 @@ def test_show_vcenter_credentials_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.Credentials() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17941,39 +40207,50 @@ def test_show_vcenter_credentials_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.show_vcenter_credentials(request) + response = client.update_network_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_show_vcenter_credentials_rest_unset_required_fields(): +def test_update_network_policy_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.show_vcenter_credentials._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("privateCloud",))) + unset_fields = transport.update_network_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "networkPolicy", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_show_vcenter_credentials_rest_interceptors(null_interceptor): +def test_update_network_policy_rest_interceptors(null_interceptor): 
transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17986,14 +40263,16 @@ def test_show_vcenter_credentials_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_show_vcenter_credentials" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_update_network_policy" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_show_vcenter_credentials" + transports.VmwareEngineRestInterceptor, "pre_update_network_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ShowVcenterCredentialsRequest.pb( - vmwareengine.ShowVcenterCredentialsRequest() + pb_message = vmwareengine.UpdateNetworkPolicyRequest.pb( + vmwareengine.UpdateNetworkPolicyRequest() ) transcode.return_value = { "method": "post", @@ -18005,19 +40284,19 @@ def test_show_vcenter_credentials_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.Credentials.to_json( - vmwareengine_resources.Credentials() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.ShowVcenterCredentialsRequest() + request = vmwareengine.UpdateNetworkPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.Credentials() + post.return_value = operations_pb2.Operation() - client.show_vcenter_credentials( + client.update_network_policy( request, metadata=[ ("key", "val"), @@ -18029,8 +40308,8 @@ def test_show_vcenter_credentials_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_show_vcenter_credentials_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ShowVcenterCredentialsRequest +def test_update_network_policy_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateNetworkPolicyRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18039,7 +40318,9 @@ def test_show_vcenter_credentials_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "network_policy": { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + } } request = request_type(**request_init) @@ -18052,10 +40333,10 @@ def test_show_vcenter_credentials_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.show_vcenter_credentials(request) + client.update_network_policy(request) -def test_show_vcenter_credentials_rest_flattened(): +def test_update_network_policy_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18064,42 +40345,43 @@ def test_show_vcenter_credentials_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.Credentials() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "network_policy": { + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + } } # get truthy value for each flattened field mock_args = dict( - private_cloud="private_cloud_value", + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.Credentials.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.show_vcenter_credentials(**mock_args) + client.update_network_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:showVcenterCredentials" + "%s/v1/{network_policy.name=projects/*/locations/*/networkPolicies/*}" % client.transport._host, args[1], ) -def test_show_vcenter_credentials_rest_flattened_error(transport: str = "rest"): +def test_update_network_policy_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18108,13 +40390,14 @@ def test_show_vcenter_credentials_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.show_vcenter_credentials( - vmwareengine.ShowVcenterCredentialsRequest(), - private_cloud="private_cloud_value", + client.update_network_policy( + vmwareengine.UpdateNetworkPolicyRequest(), + network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_show_vcenter_credentials_rest_error(): +def test_update_network_policy_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18123,11 +40406,11 @@ def test_show_vcenter_credentials_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ResetNsxCredentialsRequest, + vmwareengine.DeleteNetworkPolicyRequest, dict, ], ) -def test_reset_nsx_credentials_rest(request_type): +def test_delete_network_policy_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18135,7 +40418,7 @@ def test_reset_nsx_credentials_rest(request_type): # send a request that will satisfy transcoding request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" } request = request_type(**request_init) @@ -18151,19 +40434,19 @@ def test_reset_nsx_credentials_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reset_nsx_credentials(request) + response = client.delete_network_policy(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_reset_nsx_credentials_rest_required_fields( - request_type=vmwareengine.ResetNsxCredentialsRequest, +def test_delete_network_policy_rest_required_fields( + request_type=vmwareengine.DeleteNetworkPolicyRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["private_cloud"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18178,21 +40461,23 @@ def test_reset_nsx_credentials_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reset_nsx_credentials._get_unset_required_fields(jsonified_request) + ).delete_network_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["privateCloud"] = "private_cloud_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reset_nsx_credentials._get_unset_required_fields(jsonified_request) + ).delete_network_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "privateCloud" in jsonified_request - assert jsonified_request["privateCloud"] == "private_cloud_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18213,10 +40498,9 @@ def test_reset_nsx_credentials_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -18226,24 +40510,24 @@ def test_reset_nsx_credentials_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reset_nsx_credentials(request) + response = client.delete_network_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_reset_nsx_credentials_rest_unset_required_fields(): +def test_delete_network_policy_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.reset_nsx_credentials._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("privateCloud",))) + unset_fields = transport.delete_network_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reset_nsx_credentials_rest_interceptors(null_interceptor): +def test_delete_network_policy_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18258,14 +40542,14 @@ def test_reset_nsx_credentials_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_reset_nsx_credentials" + transports.VmwareEngineRestInterceptor, "post_delete_network_policy" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_reset_nsx_credentials" + transports.VmwareEngineRestInterceptor, "pre_delete_network_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ResetNsxCredentialsRequest.pb( - vmwareengine.ResetNsxCredentialsRequest() + pb_message = vmwareengine.DeleteNetworkPolicyRequest.pb( + vmwareengine.DeleteNetworkPolicyRequest() ) transcode.return_value = { "method": "post", @@ -18281,7 +40565,7 @@ def test_reset_nsx_credentials_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.ResetNsxCredentialsRequest() + request = vmwareengine.DeleteNetworkPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18289,7 +40573,7 @@ def test_reset_nsx_credentials_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.reset_nsx_credentials( + client.delete_network_policy( request, metadata=[ ("key", "val"), @@ -18301,8 +40585,8 @@ def test_reset_nsx_credentials_rest_interceptors(null_interceptor): post.assert_called_once() -def test_reset_nsx_credentials_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ResetNsxCredentialsRequest +def test_delete_network_policy_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteNetworkPolicyRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18311,7 +40595,7 @@ def test_reset_nsx_credentials_rest_bad_request( # send a 
request that will satisfy transcoding request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" } request = request_type(**request_init) @@ -18324,10 +40608,10 @@ def test_reset_nsx_credentials_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.reset_nsx_credentials(request) + client.delete_network_policy(request) -def test_reset_nsx_credentials_rest_flattened(): +def test_delete_network_policy_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18340,12 +40624,12 @@ def test_reset_nsx_credentials_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/networkPolicies/sample3" } # get truthy value for each flattened field mock_args = dict( - private_cloud="private_cloud_value", + name="name_value", ) mock_args.update(sample_request) @@ -18356,20 +40640,20 @@ def test_reset_nsx_credentials_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.reset_nsx_credentials(**mock_args) + client.delete_network_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetNsxCredentials" + "%s/v1/{name=projects/*/locations/*/networkPolicies/*}" % client.transport._host, args[1], ) -def test_reset_nsx_credentials_rest_flattened_error(transport: str = "rest"): +def test_delete_network_policy_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18378,13 +40662,13 @@ def test_reset_nsx_credentials_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.reset_nsx_credentials( - vmwareengine.ResetNsxCredentialsRequest(), - private_cloud="private_cloud_value", + client.delete_network_policy( + vmwareengine.DeleteNetworkPolicyRequest(), + name="name_value", ) -def test_reset_nsx_credentials_rest_error(): +def test_delete_network_policy_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -18393,11 +40677,11 @@ def test_reset_nsx_credentials_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ResetVcenterCredentialsRequest, + vmwareengine.ListManagementDnsZoneBindingsRequest, dict, ], ) -def test_reset_vcenter_credentials_rest(request_type): +def test_list_management_dns_zone_bindings_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18405,35 +40689,44 @@ def test_reset_vcenter_credentials_rest(request_type): # send a request that will satisfy transcoding request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } request = request_type(**request_init) # Mock the http request call within the method and 
fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListManagementDnsZoneBindingsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListManagementDnsZoneBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reset_vcenter_credentials(request) + response = client.list_management_dns_zone_bindings(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListManagementDnsZoneBindingsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_reset_vcenter_credentials_rest_required_fields( - request_type=vmwareengine.ResetVcenterCredentialsRequest, +def test_list_management_dns_zone_bindings_rest_required_fields( + request_type=vmwareengine.ListManagementDnsZoneBindingsRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["private_cloud"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18448,21 +40741,30 @@ def test_reset_vcenter_credentials_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reset_vcenter_credentials._get_unset_required_fields(jsonified_request) + 
).list_management_dns_zone_bindings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["privateCloud"] = "private_cloud_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reset_vcenter_credentials._get_unset_required_fields(jsonified_request) + ).list_management_dns_zone_bindings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "privateCloud" in jsonified_request - assert jsonified_request["privateCloud"] == "private_cloud_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18471,7 +40773,7 @@ def test_reset_vcenter_credentials_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListManagementDnsZoneBindingsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18483,37 +40785,53 @@ def test_reset_vcenter_credentials_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListManagementDnsZoneBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reset_vcenter_credentials(request) + response = client.list_management_dns_zone_bindings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_reset_vcenter_credentials_rest_unset_required_fields(): +def test_list_management_dns_zone_bindings_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.reset_vcenter_credentials._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("privateCloud",))) + unset_fields = ( + transport.list_management_dns_zone_bindings._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reset_vcenter_credentials_rest_interceptors(null_interceptor): +def test_list_management_dns_zone_bindings_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ 
-18526,16 +40844,14 @@ def test_reset_vcenter_credentials_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_reset_vcenter_credentials" + transports.VmwareEngineRestInterceptor, "post_list_management_dns_zone_bindings" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_reset_vcenter_credentials" + transports.VmwareEngineRestInterceptor, "pre_list_management_dns_zone_bindings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ResetVcenterCredentialsRequest.pb( - vmwareengine.ResetVcenterCredentialsRequest() + pb_message = vmwareengine.ListManagementDnsZoneBindingsRequest.pb( + vmwareengine.ListManagementDnsZoneBindingsRequest() ) transcode.return_value = { "method": "post", @@ -18547,19 +40863,21 @@ def test_reset_vcenter_credentials_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + vmwareengine.ListManagementDnsZoneBindingsResponse.to_json( + vmwareengine.ListManagementDnsZoneBindingsResponse() + ) ) - request = vmwareengine.ResetVcenterCredentialsRequest() + request = vmwareengine.ListManagementDnsZoneBindingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine.ListManagementDnsZoneBindingsResponse() - client.reset_vcenter_credentials( + client.list_management_dns_zone_bindings( request, metadata=[ ("key", "val"), @@ -18571,8 +40889,9 @@ def test_reset_vcenter_credentials_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_reset_vcenter_credentials_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ResetVcenterCredentialsRequest +def test_list_management_dns_zone_bindings_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.ListManagementDnsZoneBindingsRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18581,7 +40900,7 @@ def test_reset_vcenter_credentials_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } request = request_type(**request_init) @@ -18594,10 +40913,10 @@ def test_reset_vcenter_credentials_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.reset_vcenter_credentials(request) + client.list_management_dns_zone_bindings(request) -def test_reset_vcenter_credentials_rest_flattened(): +def test_list_management_dns_zone_bindings_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18606,40 +40925,46 @@ def test_reset_vcenter_credentials_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListManagementDnsZoneBindingsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "private_cloud": "projects/sample1/locations/sample2/privateClouds/sample3" + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } # get truthy value for each flattened field mock_args = dict( - private_cloud="private_cloud_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListManagementDnsZoneBindingsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.reset_vcenter_credentials(**mock_args) + client.list_management_dns_zone_bindings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{private_cloud=projects/*/locations/*/privateClouds/*}:resetVcenterCredentials" + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/managementDnsZoneBindings" % client.transport._host, args[1], ) -def test_reset_vcenter_credentials_rest_flattened_error(transport: str = "rest"): +def test_list_management_dns_zone_bindings_rest_flattened_error( + transport: str = "rest", +): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18648,26 +40973,91 @@ def test_reset_vcenter_credentials_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.reset_vcenter_credentials( - vmwareengine.ResetVcenterCredentialsRequest(), - private_cloud="private_cloud_value", + client.list_management_dns_zone_bindings( + vmwareengine.ListManagementDnsZoneBindingsRequest(), + parent="parent_value", ) -def test_reset_vcenter_credentials_rest_error(): +def test_list_management_dns_zone_bindings_rest_pager(transport: str = "rest"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="abc", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[], + next_page_token="def", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + next_page_token="ghi", + ), + vmwareengine.ListManagementDnsZoneBindingsResponse( + management_dns_zone_bindings=[ + vmwareengine_resources.ManagementDnsZoneBinding(), + vmwareengine_resources.ManagementDnsZoneBinding(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListManagementDnsZoneBindingsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val 
in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + } + + pager = client.list_management_dns_zone_bindings(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.ManagementDnsZoneBinding) + for i in results + ) + + pages = list( + client.list_management_dns_zone_bindings(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreateHcxActivationKeyRequest, + vmwareengine.GetManagementDnsZoneBindingRequest, dict, ], ) -def test_create_hcx_activation_key_rest(request_type): +def test_get_management_dns_zone_binding_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18675,112 +41065,49 @@ def test_create_hcx_activation_key_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } - request_init["hcx_activation_key"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "state": 1, - "activation_key": "activation_key_value", - "uid": "uid_value", + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.CreateHcxActivationKeyRequest.meta.fields[ - "hcx_activation_key" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["hcx_activation_key"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["hcx_activation_key"][field])): - del request_init["hcx_activation_key"][field][i][subfield] - else: - del request_init["hcx_activation_key"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value", + state=vmwareengine_resources.ManagementDnsZoneBinding.State.ACTIVE, + description="description_value", + uid="uid_value", + vpc_network="vpc_network_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.ManagementDnsZoneBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hcx_activation_key(request) + response = client.get_management_dns_zone_binding(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, vmwareengine_resources.ManagementDnsZoneBinding) + assert response.name == "name_value" + assert ( + response.state == vmwareengine_resources.ManagementDnsZoneBinding.State.ACTIVE + ) + assert response.description == "description_value" + assert response.uid == "uid_value" -def test_create_hcx_activation_key_rest_required_fields( - request_type=vmwareengine.CreateHcxActivationKeyRequest, +def test_get_management_dns_zone_binding_rest_required_fields( + request_type=vmwareengine.GetManagementDnsZoneBindingRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" - request_init["hcx_activation_key_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18792,39 +41119,24 @@ def test_create_hcx_activation_key_rest_required_fields( ) # verify fields with default values are dropped - assert "hcxActivationKeyId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hcx_activation_key._get_unset_required_fields(jsonified_request) + ).get_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "hcxActivationKeyId" in jsonified_request - assert ( - jsonified_request["hcxActivationKeyId"] == request_init["hcx_activation_key_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["hcxActivationKeyId"] = "hcx_activation_key_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_hcx_activation_key._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "hcx_activation_key_id", - "request_id", - ) - ) + ).get_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "hcxActivationKeyId" in jsonified_request - assert jsonified_request["hcxActivationKeyId"] == "hcx_activation_key_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18833,7 +41145,7 @@ def test_create_hcx_activation_key_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.ManagementDnsZoneBinding() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18845,57 +41157,43 @@ def test_create_hcx_activation_key_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.ManagementDnsZoneBinding.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_hcx_activation_key(request) + response = client.get_management_dns_zone_binding(request) - expected_params = [ - ( - "hcxActivationKeyId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_hcx_activation_key_rest_unset_required_fields(): +def test_get_management_dns_zone_binding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_hcx_activation_key._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "hcxActivationKeyId", - "requestId", - ) - ) - & set( - ( - "parent", - "hcxActivationKey", - "hcxActivationKeyId", - ) - ) + unset_fields = transport.get_management_dns_zone_binding._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_hcx_activation_key_rest_interceptors(null_interceptor): +def test_get_management_dns_zone_binding_rest_interceptors(null_interceptor): 
transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18908,16 +41206,14 @@ def test_create_hcx_activation_key_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_create_hcx_activation_key" + transports.VmwareEngineRestInterceptor, "post_get_management_dns_zone_binding" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_create_hcx_activation_key" + transports.VmwareEngineRestInterceptor, "pre_get_management_dns_zone_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.CreateHcxActivationKeyRequest.pb( - vmwareengine.CreateHcxActivationKeyRequest() + pb_message = vmwareengine.GetManagementDnsZoneBindingRequest.pb( + vmwareengine.GetManagementDnsZoneBindingRequest() ) transcode.return_value = { "method": "post", @@ -18929,19 +41225,21 @@ def test_create_hcx_activation_key_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + vmwareengine_resources.ManagementDnsZoneBinding.to_json( + vmwareengine_resources.ManagementDnsZoneBinding() + ) ) - request = vmwareengine.CreateHcxActivationKeyRequest() + request = vmwareengine.GetManagementDnsZoneBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine_resources.ManagementDnsZoneBinding() - client.create_hcx_activation_key( + client.get_management_dns_zone_binding( request, metadata=[ ("key", "val"), @@ -18953,8 +41251,9 @@ def 
test_create_hcx_activation_key_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_hcx_activation_key_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.CreateHcxActivationKeyRequest +def test_get_management_dns_zone_binding_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.GetManagementDnsZoneBindingRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18963,7 +41262,7 @@ def test_create_hcx_activation_key_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" } request = request_type(**request_init) @@ -18976,10 +41275,10 @@ def test_create_hcx_activation_key_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_hcx_activation_key(request) + client.get_management_dns_zone_binding(request) -def test_create_hcx_activation_key_rest_flattened(): +def test_get_management_dns_zone_binding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18988,44 +41287,42 @@ def test_create_hcx_activation_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.ManagementDnsZoneBinding() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - hcx_activation_key=vmwareengine_resources.HcxActivationKey( - name="name_value" - ), - hcx_activation_key_id="hcx_activation_key_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.ManagementDnsZoneBinding.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_hcx_activation_key(**mock_args) + client.get_management_dns_zone_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys" + "%s/v1/{name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}" % client.transport._host, args[1], ) -def test_create_hcx_activation_key_rest_flattened_error(transport: str = "rest"): +def test_get_management_dns_zone_binding_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19034,17 +41331,13 @@ def test_create_hcx_activation_key_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_hcx_activation_key( - vmwareengine.CreateHcxActivationKeyRequest(), - parent="parent_value", - hcx_activation_key=vmwareengine_resources.HcxActivationKey( - name="name_value" - ), - hcx_activation_key_id="hcx_activation_key_id_value", + client.get_management_dns_zone_binding( + vmwareengine.GetManagementDnsZoneBindingRequest(), + name="name_value", ) -def test_create_hcx_activation_key_rest_error(): +def test_get_management_dns_zone_binding_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19053,11 +41346,11 @@ def test_create_hcx_activation_key_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListHcxActivationKeysRequest, + vmwareengine.CreateManagementDnsZoneBindingRequest, dict, ], ) -def test_list_hcx_activation_keys_rest(request_type): +def test_create_management_dns_zone_binding_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19067,40 +41360,117 @@ def test_list_hcx_activation_keys_rest(request_type): request_init = { "parent": "projects/sample1/locations/sample2/privateClouds/sample3" } + 
request_init["management_dns_zone_binding"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "description": "description_value", + "vpc_network": "vpc_network_value", + "vmware_engine_network": "vmware_engine_network_value", + "uid": "uid_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreateManagementDnsZoneBindingRequest.meta.fields[ + "management_dns_zone_binding" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "management_dns_zone_binding" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + 
is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["management_dns_zone_binding"][field]) + ): + del request_init["management_dns_zone_binding"][field][i][subfield] + else: + del request_init["management_dns_zone_binding"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListHcxActivationKeysResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListHcxActivationKeysResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hcx_activation_keys(request) + response = client.create_management_dns_zone_binding(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHcxActivationKeysPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_hcx_activation_keys_rest_required_fields( - request_type=vmwareengine.ListHcxActivationKeysRequest, +def test_create_management_dns_zone_binding_rest_required_fields( + request_type=vmwareengine.CreateManagementDnsZoneBindingRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} request_init["parent"] = "" + request_init["management_dns_zone_binding_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19112,24 +41482,33 @@ def test_list_hcx_activation_keys_rest_required_fields( ) # verify fields with default values are dropped + assert "managementDnsZoneBindingId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hcx_activation_keys._get_unset_required_fields(jsonified_request) + ).create_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # 
verify required fields with default values are now present + assert "managementDnsZoneBindingId" in jsonified_request + assert ( + jsonified_request["managementDnsZoneBindingId"] + == request_init["management_dns_zone_binding_id"] + ) jsonified_request["parent"] = "parent_value" + jsonified_request[ + "managementDnsZoneBindingId" + ] = "management_dns_zone_binding_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_hcx_activation_keys._get_unset_required_fields(jsonified_request) + ).create_management_dns_zone_binding._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", + "management_dns_zone_binding_id", + "request_id", ) ) jsonified_request.update(unset_fields) @@ -19137,6 +41516,11 @@ def test_list_hcx_activation_keys_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "managementDnsZoneBindingId" in jsonified_request + assert ( + jsonified_request["managementDnsZoneBindingId"] + == "management_dns_zone_binding_id_value" + ) client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19145,7 +41529,7 @@ def test_list_hcx_activation_keys_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListHcxActivationKeysResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19157,47 +41541,59 @@ def test_list_hcx_activation_keys_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine.ListHcxActivationKeysResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_hcx_activation_keys(request) + response = client.create_management_dns_zone_binding(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "managementDnsZoneBindingId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_hcx_activation_keys_rest_unset_required_fields(): +def test_create_management_dns_zone_binding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_hcx_activation_keys._get_unset_required_fields({}) + unset_fields = ( + transport.create_management_dns_zone_binding._get_unset_required_fields({}) + ) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", + "managementDnsZoneBindingId", + "requestId", + ) + ) + & set( + ( + "parent", + "managementDnsZoneBinding", + "managementDnsZoneBindingId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_hcx_activation_keys_rest_interceptors(null_interceptor): +def 
test_create_management_dns_zone_binding_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19210,14 +41606,17 @@ def test_list_hcx_activation_keys_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_hcx_activation_keys" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, + "post_create_management_dns_zone_binding", ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_hcx_activation_keys" + transports.VmwareEngineRestInterceptor, "pre_create_management_dns_zone_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListHcxActivationKeysRequest.pb( - vmwareengine.ListHcxActivationKeysRequest() + pb_message = vmwareengine.CreateManagementDnsZoneBindingRequest.pb( + vmwareengine.CreateManagementDnsZoneBindingRequest() ) transcode.return_value = { "method": "post", @@ -19229,19 +41628,19 @@ def test_list_hcx_activation_keys_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListHcxActivationKeysResponse.to_json( - vmwareengine.ListHcxActivationKeysResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.ListHcxActivationKeysRequest() + request = vmwareengine.CreateManagementDnsZoneBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListHcxActivationKeysResponse() + post.return_value = operations_pb2.Operation() - client.list_hcx_activation_keys( + client.create_management_dns_zone_binding( request, metadata=[ 
("key", "val"), @@ -19253,8 +41652,9 @@ def test_list_hcx_activation_keys_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_hcx_activation_keys_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListHcxActivationKeysRequest +def test_create_management_dns_zone_binding_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.CreateManagementDnsZoneBindingRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19276,10 +41676,10 @@ def test_list_hcx_activation_keys_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_hcx_activation_keys(request) + client.create_management_dns_zone_binding(request) -def test_list_hcx_activation_keys_rest_flattened(): +def test_create_management_dns_zone_binding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19288,7 +41688,7 @@ def test_list_hcx_activation_keys_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListHcxActivationKeysResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -19298,32 +41698,36 @@ def test_list_hcx_activation_keys_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + management_dns_zone_binding_id="management_dns_zone_binding_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListHcxActivationKeysResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_hcx_activation_keys(**mock_args) + client.create_management_dns_zone_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/hcxActivationKeys" + "%s/v1/{parent=projects/*/locations/*/privateClouds/*}/managementDnsZoneBindings" % client.transport._host, args[1], ) -def test_list_hcx_activation_keys_rest_flattened_error(transport: str = "rest"): +def test_create_management_dns_zone_binding_rest_flattened_error( + transport: str = "rest", +): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19332,87 +41736,30 @@ def test_list_hcx_activation_keys_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_hcx_activation_keys( - vmwareengine.ListHcxActivationKeysRequest(), + client.create_management_dns_zone_binding( + vmwareengine.CreateManagementDnsZoneBindingRequest(), parent="parent_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + management_dns_zone_binding_id="management_dns_zone_binding_id_value", ) -def test_list_hcx_activation_keys_rest_pager(transport: str = "rest"): +def test_create_management_dns_zone_binding_rest_error(): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - ], - next_page_token="abc", - ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[], - next_page_token="def", - ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - ], - next_page_token="ghi", - ), - vmwareengine.ListHcxActivationKeysResponse( - hcx_activation_keys=[ - vmwareengine_resources.HcxActivationKey(), - vmwareengine_resources.HcxActivationKey(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - vmwareengine.ListHcxActivationKeysResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, 
response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/privateClouds/sample3" - } - - pager = client.list_hcx_activation_keys(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, vmwareengine_resources.HcxActivationKey) for i in results - ) - - pages = list(client.list_hcx_activation_keys(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetHcxActivationKeyRequest, + vmwareengine.UpdateManagementDnsZoneBindingRequest, dict, ], ) -def test_get_hcx_activation_key_rest(request_type): +def test_update_management_dns_zone_binding_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19420,46 +41767,119 @@ def test_get_hcx_activation_key_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/hcxActivationKeys/sample4" + "management_dns_zone_binding": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" + } + } + request_init["management_dns_zone_binding"] = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "description": "description_value", + "vpc_network": "vpc_network_value", + "vmware_engine_network": "vmware_engine_network_value", + "uid": "uid_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.UpdateManagementDnsZoneBindingRequest.meta.fields[ + "management_dns_zone_binding" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "management_dns_zone_binding" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request 
which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["management_dns_zone_binding"][field]) + ): + del request_init["management_dns_zone_binding"][field][i][subfield] + else: + del request_init["management_dns_zone_binding"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.HcxActivationKey( - name="name_value", - state=vmwareengine_resources.HcxActivationKey.State.AVAILABLE, - activation_key="activation_key_value", - uid="uid_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hcx_activation_key(request) + response = client.update_management_dns_zone_binding(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.HcxActivationKey) - assert response.name == "name_value" - assert response.state == vmwareengine_resources.HcxActivationKey.State.AVAILABLE - assert response.activation_key == "activation_key_value" - assert response.uid == "uid_value" + assert response.operation.name == "operations/spam" -def test_get_hcx_activation_key_rest_required_fields( - request_type=vmwareengine.GetHcxActivationKeyRequest, +def test_update_management_dns_zone_binding_rest_required_fields( + request_type=vmwareengine.UpdateManagementDnsZoneBindingRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19474,21 +41894,24 @@ def test_get_hcx_activation_key_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hcx_activation_key._get_unset_required_fields(jsonified_request) + ).update_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_hcx_activation_key._get_unset_required_fields(jsonified_request) + ).update_management_dns_zone_binding._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19497,7 +41920,7 @@ def test_get_hcx_activation_key_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.HcxActivationKey() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19509,39 +41932,52 @@ def test_get_hcx_activation_key_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_hcx_activation_key(request) + response = client.update_management_dns_zone_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_hcx_activation_key_rest_unset_required_fields(): +def test_update_management_dns_zone_binding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields 
= transport.get_hcx_activation_key._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.update_management_dns_zone_binding._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "managementDnsZoneBinding", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_hcx_activation_key_rest_interceptors(null_interceptor): +def test_update_management_dns_zone_binding_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19554,14 +41990,17 @@ def test_get_hcx_activation_key_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_hcx_activation_key" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, + "post_update_management_dns_zone_binding", ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_hcx_activation_key" + transports.VmwareEngineRestInterceptor, "pre_update_management_dns_zone_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetHcxActivationKeyRequest.pb( - vmwareengine.GetHcxActivationKeyRequest() + pb_message = vmwareengine.UpdateManagementDnsZoneBindingRequest.pb( + vmwareengine.UpdateManagementDnsZoneBindingRequest() ) transcode.return_value = { "method": "post", @@ -19573,19 +42012,19 @@ def test_get_hcx_activation_key_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.HcxActivationKey.to_json( - vmwareengine_resources.HcxActivationKey() + 
req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.GetHcxActivationKeyRequest() + request = vmwareengine.UpdateManagementDnsZoneBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.HcxActivationKey() + post.return_value = operations_pb2.Operation() - client.get_hcx_activation_key( + client.update_management_dns_zone_binding( request, metadata=[ ("key", "val"), @@ -19597,8 +42036,9 @@ def test_get_hcx_activation_key_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_hcx_activation_key_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetHcxActivationKeyRequest +def test_update_management_dns_zone_binding_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.UpdateManagementDnsZoneBindingRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19607,7 +42047,9 @@ def test_get_hcx_activation_key_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/hcxActivationKeys/sample4" + "management_dns_zone_binding": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" + } } request = request_type(**request_init) @@ -19620,10 +42062,10 @@ def test_get_hcx_activation_key_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_hcx_activation_key(request) + client.update_management_dns_zone_binding(request) -def test_get_hcx_activation_key_rest_flattened(): +def test_update_management_dns_zone_binding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19632,42 +42074,47 @@ def test_get_hcx_activation_key_rest_flattened(): # Mock 
the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.HcxActivationKey() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateClouds/sample3/hcxActivationKeys/sample4" + "management_dns_zone_binding": { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.HcxActivationKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_hcx_activation_key(**mock_args) + client.update_management_dns_zone_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateClouds/*/hcxActivationKeys/*}" + "%s/v1/{management_dns_zone_binding.name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}" % client.transport._host, args[1], ) -def test_get_hcx_activation_key_rest_flattened_error(transport: str = "rest"): +def test_update_management_dns_zone_binding_rest_flattened_error( + transport: str = "rest", +): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19676,13 +42123,16 @@ def test_get_hcx_activation_key_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_hcx_activation_key( - vmwareengine.GetHcxActivationKeyRequest(), - name="name_value", + client.update_management_dns_zone_binding( + vmwareengine.UpdateManagementDnsZoneBindingRequest(), + management_dns_zone_binding=vmwareengine_resources.ManagementDnsZoneBinding( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_hcx_activation_key_rest_error(): +def test_update_management_dns_zone_binding_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19691,11 +42141,11 @@ def test_get_hcx_activation_key_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetNetworkPolicyRequest, + vmwareengine.DeleteManagementDnsZoneBindingRequest, dict, ], ) -def test_get_network_policy_rest(request_type): +def test_delete_management_dns_zone_binding_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19703,48 +42153,30 @@ def test_get_network_policy_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/networkPolicies/sample3" + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.NetworkPolicy( - name="name_value", - edge_services_cidr="edge_services_cidr_value", - uid="uid_value", - vmware_engine_network="vmware_engine_network_value", - description="description_value", - vmware_engine_network_canonical="vmware_engine_network_canonical_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_network_policy(request) + response = client.delete_management_dns_zone_binding(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.NetworkPolicy) - assert response.name == "name_value" - assert response.edge_services_cidr == "edge_services_cidr_value" - assert response.uid == "uid_value" - assert response.vmware_engine_network == "vmware_engine_network_value" - assert response.description == "description_value" - assert ( - response.vmware_engine_network_canonical - == "vmware_engine_network_canonical_value" - ) + assert response.operation.name == "operations/spam" -def test_get_network_policy_rest_required_fields( - request_type=vmwareengine.GetNetworkPolicyRequest, +def test_delete_management_dns_zone_binding_rest_required_fields( + request_type=vmwareengine.DeleteManagementDnsZoneBindingRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -19764,7 +42196,7 @@ def test_get_network_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_network_policy._get_unset_required_fields(jsonified_request) + ).delete_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19773,7 +42205,9 @@ def test_get_network_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_network_policy._get_unset_required_fields(jsonified_request) + ).delete_management_dns_zone_binding._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19787,7 +42221,7 @@ def test_get_network_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.NetworkPolicy() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19799,39 +42233,38 @@ def test_get_network_policy_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_network_policy(request) + response = client.delete_management_dns_zone_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_network_policy_rest_unset_required_fields(): +def test_delete_management_dns_zone_binding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_network_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.delete_management_dns_zone_binding._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_network_policy_rest_interceptors(null_interceptor): +def test_delete_management_dns_zone_binding_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19844,14 +42277,17 @@ def test_get_network_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_network_policy" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, + "post_delete_management_dns_zone_binding", ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_network_policy" + transports.VmwareEngineRestInterceptor, "pre_delete_management_dns_zone_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetNetworkPolicyRequest.pb( - vmwareengine.GetNetworkPolicyRequest() + pb_message = vmwareengine.DeleteManagementDnsZoneBindingRequest.pb( + vmwareengine.DeleteManagementDnsZoneBindingRequest() ) transcode.return_value = { "method": "post", @@ -19863,19 +42299,19 @@ def test_get_network_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.NetworkPolicy.to_json( - vmwareengine_resources.NetworkPolicy() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.GetNetworkPolicyRequest() + request = vmwareengine.DeleteManagementDnsZoneBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.NetworkPolicy() + post.return_value = operations_pb2.Operation() - client.get_network_policy( + client.delete_management_dns_zone_binding( request, metadata=[ ("key", "val"), @@ -19887,8 +42323,9 @@ def test_get_network_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_network_policy_rest_bad_request( - transport: 
str = "rest", request_type=vmwareengine.GetNetworkPolicyRequest +def test_delete_management_dns_zone_binding_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.DeleteManagementDnsZoneBindingRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19897,7 +42334,7 @@ def test_get_network_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" } request = request_type(**request_init) @@ -19910,10 +42347,10 @@ def test_get_network_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_network_policy(request) + client.delete_management_dns_zone_binding(request) -def test_get_network_policy_rest_flattened(): +def test_delete_management_dns_zone_binding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19922,11 +42359,11 @@ def test_get_network_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.NetworkPolicy() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" } # get truthy value for each flattened field @@ -19938,26 +42375,26 @@ def test_get_network_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.NetworkPolicy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_network_policy(**mock_args) + client.delete_management_dns_zone_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/networkPolicies/*}" + "%s/v1/{name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}" % client.transport._host, args[1], ) -def test_get_network_policy_rest_flattened_error(transport: str = "rest"): +def test_delete_management_dns_zone_binding_rest_flattened_error( + transport: str = "rest", +): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19966,13 +42403,13 @@ def test_get_network_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_network_policy( - vmwareengine.GetNetworkPolicyRequest(), + client.delete_management_dns_zone_binding( + vmwareengine.DeleteManagementDnsZoneBindingRequest(), name="name_value", ) -def test_get_network_policy_rest_error(): +def test_delete_management_dns_zone_binding_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -19981,52 +42418,47 @@ def test_get_network_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListNetworkPoliciesRequest, + vmwareengine.RepairManagementDnsZoneBindingRequest, dict, ], ) -def test_list_network_policies_rest(request_type): +def test_repair_management_dns_zone_binding_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListNetworkPoliciesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListNetworkPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_network_policies(request) + response = client.repair_management_dns_zone_binding(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNetworkPoliciesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_network_policies_rest_required_fields( - request_type=vmwareengine.ListNetworkPoliciesRequest, +def test_repair_management_dns_zone_binding_rest_required_fields( + request_type=vmwareengine.RepairManagementDnsZoneBindingRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20041,30 +42473,21 @@ def test_list_network_policies_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_network_policies._get_unset_required_fields(jsonified_request) + ).repair_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields 
= transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_network_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).repair_management_dns_zone_binding._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20073,7 +42496,7 @@ def test_list_network_policies_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListNetworkPoliciesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20085,49 +42508,39 @@ def test_list_network_policies_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine.ListNetworkPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_network_policies(request) + response = client.repair_management_dns_zone_binding(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_network_policies_rest_unset_required_fields(): +def test_repair_management_dns_zone_binding_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_network_policies._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) + unset_fields = ( + transport.repair_management_dns_zone_binding._get_unset_required_fields({}) ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_network_policies_rest_interceptors(null_interceptor): +def test_repair_management_dns_zone_binding_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20140,14 +42553,17 @@ def 
test_list_network_policies_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_network_policies" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, + "post_repair_management_dns_zone_binding", ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_network_policies" + transports.VmwareEngineRestInterceptor, "pre_repair_management_dns_zone_binding" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListNetworkPoliciesRequest.pb( - vmwareengine.ListNetworkPoliciesRequest() + pb_message = vmwareengine.RepairManagementDnsZoneBindingRequest.pb( + vmwareengine.RepairManagementDnsZoneBindingRequest() ) transcode.return_value = { "method": "post", @@ -20159,19 +42575,19 @@ def test_list_network_policies_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListNetworkPoliciesResponse.to_json( - vmwareengine.ListNetworkPoliciesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.ListNetworkPoliciesRequest() + request = vmwareengine.RepairManagementDnsZoneBindingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListNetworkPoliciesResponse() + post.return_value = operations_pb2.Operation() - client.list_network_policies( + client.repair_management_dns_zone_binding( request, metadata=[ ("key", "val"), @@ -20183,8 +42599,9 @@ def test_list_network_policies_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_network_policies_rest_bad_request( - transport: str = "rest", 
request_type=vmwareengine.ListNetworkPoliciesRequest +def test_repair_management_dns_zone_binding_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.RepairManagementDnsZoneBindingRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20192,7 +42609,9 @@ def test_list_network_policies_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20204,10 +42623,10 @@ def test_list_network_policies_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_network_policies(request) + client.repair_management_dns_zone_binding(request) -def test_list_network_policies_rest_flattened(): +def test_repair_management_dns_zone_binding_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20216,125 +42635,70 @@ def test_list_network_policies_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListNetworkPoliciesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/privateClouds/sample3/managementDnsZoneBindings/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListNetworkPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_network_policies(**mock_args) + client.repair_management_dns_zone_binding(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/networkPolicies" + "%s/v1/{name=projects/*/locations/*/privateClouds/*/managementDnsZoneBindings/*}:repair" % client.transport._host, args[1], ) -def test_list_network_policies_rest_flattened_error(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_network_policies( - vmwareengine.ListNetworkPoliciesRequest(), - parent="parent_value", - ) - - -def test_list_network_policies_rest_pager(transport: str = "rest"): +def test_repair_management_dns_zone_binding_rest_flattened_error( + transport: str = "rest", +): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="abc", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[], - next_page_token="def", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - ], - next_page_token="ghi", - ), - vmwareengine.ListNetworkPoliciesResponse( - network_policies=[ - vmwareengine_resources.NetworkPolicy(), - vmwareengine_resources.NetworkPolicy(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - vmwareengine.ListNetworkPoliciesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = 
client.list_network_policies(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.repair_management_dns_zone_binding( + vmwareengine.RepairManagementDnsZoneBindingRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.NetworkPolicy) for i in results) - pages = list(client.list_network_policies(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_repair_management_dns_zone_binding_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreateNetworkPolicyRequest, + vmwareengine.CreateVmwareEngineNetworkRequest, dict, ], ) -def test_create_network_policy_rest(request_type): +def test_create_vmware_engine_network_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20342,24 +42706,25 @@ def test_create_network_policy_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["network_policy"] = { + request_init["vmware_engine_network"] = { "name": "name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "internet_access": {"enabled": True, "state": 1}, - "external_ip": {}, - "edge_services_cidr": "edge_services_cidr_value", - "uid": "uid_value", - "vmware_engine_network": "vmware_engine_network_value", "description": "description_value", - "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", + "vpc_networks": [{"type_": 1, "network": "network_value"}], + "state": 1, + "type_": 1, + "uid": "uid_value", + "etag": "etag_value", } # The version of a generated 
dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.CreateNetworkPolicyRequest.meta.fields["network_policy"] + test_field = vmwareengine.CreateVmwareEngineNetworkRequest.meta.fields[ + "vmware_engine_network" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20387,7 +42752,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["network_policy"].items(): # pragma: NO COVER + for field, value in request_init[ + "vmware_engine_network" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20417,10 +42784,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["network_policy"][field])): - del request_init["network_policy"][field][i][subfield] + for i in range(0, len(request_init["vmware_engine_network"][field])): + del request_init["vmware_engine_network"][field][i][subfield] else: - del request_init["network_policy"][field][subfield] + del request_init["vmware_engine_network"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -20435,20 +42802,20 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_network_policy(request) + response = client.create_vmware_engine_network(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_network_policy_rest_required_fields( - request_type=vmwareengine.CreateNetworkPolicyRequest, +def test_create_vmware_engine_network_rest_required_fields( + request_type=vmwareengine.CreateVmwareEngineNetworkRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} request_init["parent"] = "" - request_init["network_policy_id"] = "" + request_init["vmware_engine_network_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20460,28 +42827,31 @@ def test_create_network_policy_rest_required_fields( ) # verify fields with default values are dropped - assert "networkPolicyId" not in jsonified_request + assert "vmwareEngineNetworkId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_network_policy._get_unset_required_fields(jsonified_request) + ).create_vmware_engine_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "networkPolicyId" in jsonified_request - assert jsonified_request["networkPolicyId"] == request_init["network_policy_id"] + assert "vmwareEngineNetworkId" in jsonified_request + assert ( + jsonified_request["vmwareEngineNetworkId"] + == request_init["vmware_engine_network_id"] + ) jsonified_request["parent"] = "parent_value" - jsonified_request["networkPolicyId"] = "network_policy_id_value" + jsonified_request["vmwareEngineNetworkId"] = "vmware_engine_network_id_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).create_network_policy._get_unset_required_fields(jsonified_request) + ).create_vmware_engine_network._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "network_policy_id", "request_id", + "vmware_engine_network_id", ) ) jsonified_request.update(unset_fields) @@ -20489,8 +42859,10 @@ def test_create_network_policy_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "networkPolicyId" in jsonified_request - assert jsonified_request["networkPolicyId"] == "network_policy_id_value" + assert "vmwareEngineNetworkId" in jsonified_request + assert ( + jsonified_request["vmwareEngineNetworkId"] == "vmware_engine_network_id_value" + ) client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20524,11 +42896,11 @@ def test_create_network_policy_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_network_policy(request) + response = client.create_vmware_engine_network(request) expected_params = [ ( - "networkPolicyId", + "vmwareEngineNetworkId", "", ), ("$alt", "json;enum-encoding=int"), @@ -20537,31 +42909,31 @@ def test_create_network_policy_rest_required_fields( assert expected_params == actual_params -def test_create_network_policy_rest_unset_required_fields(): +def test_create_vmware_engine_network_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_network_policy._get_unset_required_fields({}) + unset_fields = transport.create_vmware_engine_network._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "networkPolicyId", "requestId", + 
"vmwareEngineNetworkId", ) ) & set( ( "parent", - "networkPolicyId", - "networkPolicy", + "vmwareEngineNetworkId", + "vmwareEngineNetwork", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_network_policy_rest_interceptors(null_interceptor): +def test_create_vmware_engine_network_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20576,14 +42948,14 @@ def test_create_network_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_create_network_policy" + transports.VmwareEngineRestInterceptor, "post_create_vmware_engine_network" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_create_network_policy" + transports.VmwareEngineRestInterceptor, "pre_create_vmware_engine_network" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.CreateNetworkPolicyRequest.pb( - vmwareengine.CreateNetworkPolicyRequest() + pb_message = vmwareengine.CreateVmwareEngineNetworkRequest.pb( + vmwareengine.CreateVmwareEngineNetworkRequest() ) transcode.return_value = { "method": "post", @@ -20599,7 +42971,7 @@ def test_create_network_policy_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.CreateNetworkPolicyRequest() + request = vmwareengine.CreateVmwareEngineNetworkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20607,7 +42979,7 @@ def test_create_network_policy_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_network_policy( + client.create_vmware_engine_network( request, metadata=[ ("key", "val"), @@ -20619,8 +42991,8 @@ def test_create_network_policy_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_create_network_policy_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.CreateNetworkPolicyRequest +def test_create_vmware_engine_network_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreateVmwareEngineNetworkRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20640,10 +43012,10 @@ def test_create_network_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_network_policy(request) + client.create_vmware_engine_network(request) -def test_create_network_policy_rest_flattened(): +def test_create_vmware_engine_network_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20660,8 +43032,10 @@ def test_create_network_policy_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - network_policy_id="network_policy_id_value", + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + vmware_engine_network_id="vmware_engine_network_id_value", ) mock_args.update(sample_request) @@ -20672,20 +43046,20 @@ def test_create_network_policy_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_network_policy(**mock_args) + client.create_vmware_engine_network(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/networkPolicies" + "%s/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks" % client.transport._host, args[1], ) -def test_create_network_policy_rest_flattened_error(transport: str = "rest"): +def test_create_vmware_engine_network_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20694,15 +43068,17 @@ def test_create_network_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_network_policy( - vmwareengine.CreateNetworkPolicyRequest(), + client.create_vmware_engine_network( + vmwareengine.CreateVmwareEngineNetworkRequest(), parent="parent_value", - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), - network_policy_id="network_policy_id_value", + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), + vmware_engine_network_id="vmware_engine_network_id_value", ) -def test_create_network_policy_rest_error(): +def test_create_vmware_engine_network_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20711,11 +43087,11 @@ def test_create_network_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateNetworkPolicyRequest, + vmwareengine.UpdateVmwareEngineNetworkRequest, dict, ], ) -def test_update_network_policy_rest(request_type): +def test_update_vmware_engine_network_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20723,28 +43099,29 @@ def test_update_network_policy_rest(request_type): # send a request that will satisfy transcoding request_init = { - "network_policy": 
{ - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "vmware_engine_network": { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } } - request_init["network_policy"] = { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3", + request_init["vmware_engine_network"] = { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "internet_access": {"enabled": True, "state": 1}, - "external_ip": {}, - "edge_services_cidr": "edge_services_cidr_value", - "uid": "uid_value", - "vmware_engine_network": "vmware_engine_network_value", "description": "description_value", - "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", + "vpc_networks": [{"type_": 1, "network": "network_value"}], + "state": 1, + "type_": 1, + "uid": "uid_value", + "etag": "etag_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.UpdateNetworkPolicyRequest.meta.fields["network_policy"] + test_field = vmwareengine.UpdateVmwareEngineNetworkRequest.meta.fields[ + "vmware_engine_network" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20772,7 +43149,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["network_policy"].items(): # pragma: NO COVER + for field, value in request_init[ + "vmware_engine_network" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20802,10 +43181,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["network_policy"][field])): - del request_init["network_policy"][field][i][subfield] + for i in range(0, len(request_init["vmware_engine_network"][field])): + del request_init["vmware_engine_network"][field][i][subfield] else: - del request_init["network_policy"][field][subfield] + del request_init["vmware_engine_network"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20820,14 +43199,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_network_policy(request) + response = client.update_vmware_engine_network(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_network_policy_rest_required_fields( - request_type=vmwareengine.UpdateNetworkPolicyRequest, +def test_update_vmware_engine_network_rest_required_fields( + request_type=vmwareengine.UpdateVmwareEngineNetworkRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -20846,14 +43225,14 @@ def test_update_network_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_network_policy._get_unset_required_fields(jsonified_request) + ).update_vmware_engine_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_network_policy._get_unset_required_fields(jsonified_request) + ).update_vmware_engine_network._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( @@ -20897,19 +43276,19 @@ def test_update_network_policy_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_network_policy(request) + response = client.update_vmware_engine_network(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_network_policy_rest_unset_required_fields(): +def test_update_vmware_engine_network_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_network_policy._get_unset_required_fields({}) + unset_fields = transport.update_vmware_engine_network._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -20919,7 +43298,7 @@ def test_update_network_policy_rest_unset_required_fields(): ) & set( ( - "networkPolicy", + "vmwareEngineNetwork", "updateMask", ) ) @@ -20927,7 +43306,7 @@ def test_update_network_policy_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_network_policy_rest_interceptors(null_interceptor): +def test_update_vmware_engine_network_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20942,14 +43321,14 @@ def test_update_network_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_update_network_policy" + transports.VmwareEngineRestInterceptor, "post_update_vmware_engine_network" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_update_network_policy" + transports.VmwareEngineRestInterceptor, "pre_update_vmware_engine_network" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UpdateNetworkPolicyRequest.pb( - vmwareengine.UpdateNetworkPolicyRequest() + pb_message = vmwareengine.UpdateVmwareEngineNetworkRequest.pb( + vmwareengine.UpdateVmwareEngineNetworkRequest() ) transcode.return_value = { "method": "post", @@ -20965,7 +43344,7 @@ def test_update_network_policy_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.UpdateNetworkPolicyRequest() + request = vmwareengine.UpdateVmwareEngineNetworkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20973,7 +43352,7 @@ def test_update_network_policy_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_network_policy( + client.update_vmware_engine_network( request, metadata=[ ("key", "val"), @@ -20985,8 +43364,8 @@ def test_update_network_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_network_policy_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.UpdateNetworkPolicyRequest +def test_update_vmware_engine_network_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdateVmwareEngineNetworkRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20995,8 +43374,8 @@ def test_update_network_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "network_policy": { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "vmware_engine_network": { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } } request = request_type(**request_init) @@ -21010,10 +43389,10 @@ def test_update_network_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_network_policy(request) + 
client.update_vmware_engine_network(request) -def test_update_network_policy_rest_flattened(): +def test_update_vmware_engine_network_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21026,14 +43405,16 @@ def test_update_network_policy_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "network_policy": { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "vmware_engine_network": { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } } # get truthy value for each flattened field mock_args = dict( - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -21045,20 +43426,20 @@ def test_update_network_policy_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_network_policy(**mock_args) + client.update_vmware_engine_network(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{network_policy.name=projects/*/locations/*/networkPolicies/*}" + "%s/v1/{vmware_engine_network.name=projects/*/locations/*/vmwareEngineNetworks/*}" % client.transport._host, args[1], ) -def test_update_network_policy_rest_flattened_error(transport: str = "rest"): +def test_update_vmware_engine_network_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21067,14 +43448,16 @@ def test_update_network_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_network_policy( - vmwareengine.UpdateNetworkPolicyRequest(), - network_policy=vmwareengine_resources.NetworkPolicy(name="name_value"), + client.update_vmware_engine_network( + vmwareengine.UpdateVmwareEngineNetworkRequest(), + vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_network_policy_rest_error(): +def test_update_vmware_engine_network_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21083,11 +43466,11 @@ def test_update_network_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeleteNetworkPolicyRequest, + vmwareengine.DeleteVmwareEngineNetworkRequest, dict, ], ) -def test_delete_network_policy_rest(request_type): +def test_delete_vmware_engine_network_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21095,7 +43478,7 @@ def test_delete_network_policy_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/networkPolicies/sample3" + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } request = request_type(**request_init) @@ -21111,14 +43494,14 @@ def test_delete_network_policy_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_network_policy(request) + response = client.delete_vmware_engine_network(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_network_policy_rest_required_fields( - request_type=vmwareengine.DeleteNetworkPolicyRequest, +def test_delete_vmware_engine_network_rest_required_fields( + request_type=vmwareengine.DeleteVmwareEngineNetworkRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -21138,7 +43521,7 @@ def test_delete_network_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_network_policy._get_unset_required_fields(jsonified_request) + ).delete_vmware_engine_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21147,9 +43530,14 @@ def test_delete_network_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_network_policy._get_unset_required_fields(jsonified_request) + ).delete_vmware_engine_network._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "etag", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -21187,24 +43575,32 @@ def test_delete_network_policy_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_network_policy(request) + response = client.delete_vmware_engine_network(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_network_policy_rest_unset_required_fields(): +def test_delete_vmware_engine_network_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_network_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.delete_vmware_engine_network._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "requestId", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_network_policy_rest_interceptors(null_interceptor): +def test_delete_vmware_engine_network_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21219,14 +43615,14 @@ def test_delete_network_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_delete_network_policy" + transports.VmwareEngineRestInterceptor, "post_delete_vmware_engine_network" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_delete_network_policy" + 
transports.VmwareEngineRestInterceptor, "pre_delete_vmware_engine_network" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.DeleteNetworkPolicyRequest.pb( - vmwareengine.DeleteNetworkPolicyRequest() + pb_message = vmwareengine.DeleteVmwareEngineNetworkRequest.pb( + vmwareengine.DeleteVmwareEngineNetworkRequest() ) transcode.return_value = { "method": "post", @@ -21242,7 +43638,7 @@ def test_delete_network_policy_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.DeleteNetworkPolicyRequest() + request = vmwareengine.DeleteVmwareEngineNetworkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21250,7 +43646,7 @@ def test_delete_network_policy_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_network_policy( + client.delete_vmware_engine_network( request, metadata=[ ("key", "val"), @@ -21262,8 +43658,8 @@ def test_delete_network_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_network_policy_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.DeleteNetworkPolicyRequest +def test_delete_vmware_engine_network_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeleteVmwareEngineNetworkRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21272,7 +43668,7 @@ def test_delete_network_policy_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } request = request_type(**request_init) @@ -21285,10 +43681,10 @@ def test_delete_network_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_network_policy(request) + 
client.delete_vmware_engine_network(request) -def test_delete_network_policy_rest_flattened(): +def test_delete_vmware_engine_network_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21301,7 +43697,7 @@ def test_delete_network_policy_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/networkPolicies/sample3" + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } # get truthy value for each flattened field @@ -21317,20 +43713,20 @@ def test_delete_network_policy_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_network_policy(**mock_args) + client.delete_vmware_engine_network(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/networkPolicies/*}" + "%s/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}" % client.transport._host, args[1], ) -def test_delete_network_policy_rest_flattened_error(transport: str = "rest"): +def test_delete_vmware_engine_network_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21339,13 +43735,13 @@ def test_delete_network_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_network_policy( - vmwareengine.DeleteNetworkPolicyRequest(), + client.delete_vmware_engine_network( + vmwareengine.DeleteVmwareEngineNetworkRequest(), name="name_value", ) -def test_delete_network_policy_rest_error(): +def test_delete_vmware_engine_network_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21354,128 +43750,62 @@ def test_delete_network_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreateVmwareEngineNetworkRequest, + vmwareengine.GetVmwareEngineNetworkRequest, dict, ], ) -def test_create_vmware_engine_network_rest(request_type): +def test_get_vmware_engine_network_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["vmware_engine_network"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "vpc_networks": [{"type_": 1, "network": "network_value"}], - "state": 1, - "type_": 1, - "uid": "uid_value", - "etag": "etag_value", + request_init = { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.CreateVmwareEngineNetworkRequest.meta.fields[ - "vmware_engine_network" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. 
- # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "vmware_engine_network" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["vmware_engine_network"][field])): - del request_init["vmware_engine_network"][field][i][subfield] - else: - del request_init["vmware_engine_network"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.VmwareEngineNetwork( + name="name_value", + description="description_value", + state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, + type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, + uid="uid_value", + etag="etag_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_vmware_engine_network(request) + response = client.get_vmware_engine_network(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING + assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY + assert response.uid == "uid_value" + assert response.etag == "etag_value" -def test_create_vmware_engine_network_rest_required_fields( - request_type=vmwareengine.CreateVmwareEngineNetworkRequest, +def test_get_vmware_engine_network_rest_required_fields( + request_type=vmwareengine.GetVmwareEngineNetworkRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" - request_init["vmware_engine_network_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21487,42 +43817,24 @@ def test_create_vmware_engine_network_rest_required_fields( ) # verify fields with default values are dropped - assert "vmwareEngineNetworkId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).get_vmware_engine_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "vmwareEngineNetworkId" in jsonified_request - assert ( - jsonified_request["vmwareEngineNetworkId"] - == request_init["vmware_engine_network_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["vmwareEngineNetworkId"] = "vmware_engine_network_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).create_vmware_engine_network._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "request_id", - "vmware_engine_network_id", - ) - ) + ).get_vmware_engine_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "vmwareEngineNetworkId" in jsonified_request - assert ( - jsonified_request["vmwareEngineNetworkId"] == "vmware_engine_network_id_value" - ) + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21531,7 +43843,7 @@ def test_create_vmware_engine_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.VmwareEngineNetwork() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21543,57 +43855,39 @@ def test_create_vmware_engine_network_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_vmware_engine_network(request) + response = client.get_vmware_engine_network(request) - expected_params = [ - ( - "vmwareEngineNetworkId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_vmware_engine_network_rest_unset_required_fields(): +def test_get_vmware_engine_network_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_vmware_engine_network._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "vmwareEngineNetworkId", - ) - ) - & set( - ( - "parent", - "vmwareEngineNetworkId", - "vmwareEngineNetwork", - ) - ) - ) + unset_fields = transport.get_vmware_engine_network._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_vmware_engine_network_rest_interceptors(null_interceptor): +def test_get_vmware_engine_network_rest_interceptors(null_interceptor): transport 
= transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21606,16 +43900,14 @@ def test_create_vmware_engine_network_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_create_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "post_get_vmware_engine_network" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_create_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "pre_get_vmware_engine_network" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.CreateVmwareEngineNetworkRequest.pb( - vmwareengine.CreateVmwareEngineNetworkRequest() + pb_message = vmwareengine.GetVmwareEngineNetworkRequest.pb( + vmwareengine.GetVmwareEngineNetworkRequest() ) transcode.return_value = { "method": "post", @@ -21627,19 +43919,19 @@ def test_create_vmware_engine_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = vmwareengine_resources.VmwareEngineNetwork.to_json( + vmwareengine_resources.VmwareEngineNetwork() ) - request = vmwareengine.CreateVmwareEngineNetworkRequest() + request = vmwareengine.GetVmwareEngineNetworkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine_resources.VmwareEngineNetwork() - client.create_vmware_engine_network( + client.get_vmware_engine_network( request, metadata=[ ("key", "val"), @@ -21651,8 +43943,8 @@ def 
test_create_vmware_engine_network_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_vmware_engine_network_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.CreateVmwareEngineNetworkRequest +def test_get_vmware_engine_network_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetVmwareEngineNetworkRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21660,7 +43952,9 @@ def test_create_vmware_engine_network_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21672,10 +43966,10 @@ def test_create_vmware_engine_network_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_vmware_engine_network(request) + client.get_vmware_engine_network(request) -def test_create_vmware_engine_network_rest_flattened(): +def test_get_vmware_engine_network_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21684,42 +43978,42 @@ def test_create_vmware_engine_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.VmwareEngineNetwork() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - vmware_engine_network_id="vmware_engine_network_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_vmware_engine_network(**mock_args) + client.get_vmware_engine_network(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks" + "%s/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}" % client.transport._host, args[1], ) -def test_create_vmware_engine_network_rest_flattened_error(transport: str = "rest"): +def test_get_vmware_engine_network_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21728,17 +44022,13 @@ def test_create_vmware_engine_network_rest_flattened_error(transport: str = "res # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_vmware_engine_network( - vmwareengine.CreateVmwareEngineNetworkRequest(), - parent="parent_value", - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - vmware_engine_network_id="vmware_engine_network_id_value", + client.get_vmware_engine_network( + vmwareengine.GetVmwareEngineNetworkRequest(), + name="name_value", ) -def test_create_vmware_engine_network_rest_error(): +def test_get_vmware_engine_network_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21747,130 +44037,52 @@ def test_create_vmware_engine_network_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdateVmwareEngineNetworkRequest, + vmwareengine.ListVmwareEngineNetworksRequest, dict, ], ) -def test_update_vmware_engine_network_rest(request_type): +def test_list_vmware_engine_networks_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "vmware_engine_network": { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" - } - } - request_init["vmware_engine_network"] = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "vpc_networks": [{"type_": 1, "network": "network_value"}], - "state": 1, - "type_": 1, - "uid": "uid_value", - "etag": "etag_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.UpdateVmwareEngineNetworkRequest.meta.fields[ - "vmware_engine_network" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "vmware_engine_network" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not 
present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["vmware_engine_network"][field])): - del request_init["vmware_engine_network"][field][i][subfield] - else: - del request_init["vmware_engine_network"][field][subfield] + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListVmwareEngineNetworksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListVmwareEngineNetworksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_vmware_engine_network(request) + response = client.list_vmware_engine_networks(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListVmwareEngineNetworksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_vmware_engine_network_rest_required_fields( - request_type=vmwareengine.UpdateVmwareEngineNetworkRequest, +def test_list_vmware_engine_networks_rest_required_fields( + request_type=vmwareengine.ListVmwareEngineNetworksRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -21885,24 +44097,30 @@ def test_update_vmware_engine_network_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).list_vmware_engine_networks._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).list_vmware_engine_networks._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21911,7 +44129,7 @@ def test_update_vmware_engine_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListVmwareEngineNetworksResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -21923,50 +44141,51 @@ def test_update_vmware_engine_network_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine.ListVmwareEngineNetworksResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_vmware_engine_network(request) + response = client.list_vmware_engine_networks(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_vmware_engine_network_rest_unset_required_fields(): +def test_list_vmware_engine_networks_rest_unset_required_fields(): transport = 
transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_vmware_engine_network._get_unset_required_fields({}) + unset_fields = transport.list_vmware_engine_networks._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "vmwareEngineNetwork", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_vmware_engine_network_rest_interceptors(null_interceptor): +def test_list_vmware_engine_networks_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21979,16 +44198,14 @@ def test_update_vmware_engine_network_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_update_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "post_list_vmware_engine_networks" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_update_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "pre_list_vmware_engine_networks" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UpdateVmwareEngineNetworkRequest.pb( - vmwareengine.UpdateVmwareEngineNetworkRequest() + pb_message = vmwareengine.ListVmwareEngineNetworksRequest.pb( + vmwareengine.ListVmwareEngineNetworksRequest() ) transcode.return_value = { "method": "post", @@ -22000,19 +44217,21 @@ def test_update_vmware_engine_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + vmwareengine.ListVmwareEngineNetworksResponse.to_json( + vmwareengine.ListVmwareEngineNetworksResponse() + ) ) - request = vmwareengine.UpdateVmwareEngineNetworkRequest() + request = vmwareengine.ListVmwareEngineNetworksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine.ListVmwareEngineNetworksResponse() - client.update_vmware_engine_network( + client.list_vmware_engine_networks( request, metadata=[ ("key", "val"), @@ -22024,8 +44243,8 @@ def test_update_vmware_engine_network_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_vmware_engine_network_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.UpdateVmwareEngineNetworkRequest +def test_list_vmware_engine_networks_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListVmwareEngineNetworksRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22033,11 +44252,7 @@ def test_update_vmware_engine_network_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "vmware_engine_network": { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22049,10 +44264,10 @@ def test_update_vmware_engine_network_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_vmware_engine_network(request) + client.list_vmware_engine_networks(request) -def test_update_vmware_engine_network_rest_flattened(): +def test_list_vmware_engine_networks_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22061,45 +44276,40 @@ def test_update_vmware_engine_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine.ListVmwareEngineNetworksResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "vmware_engine_network": { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine.ListVmwareEngineNetworksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_vmware_engine_network(**mock_args) + client.list_vmware_engine_networks(**mock_args) # Establish that the underlying call was made with the expected # request 
object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{vmware_engine_network.name=projects/*/locations/*/vmwareEngineNetworks/*}" + "%s/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks" % client.transport._host, args[1], ) -def test_update_vmware_engine_network_rest_flattened_error(transport: str = "rest"): +def test_list_vmware_engine_networks_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22108,38 +44318,176 @@ def test_update_vmware_engine_network_rest_flattened_error(transport: str = "res # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_vmware_engine_network( - vmwareengine.UpdateVmwareEngineNetworkRequest(), - vmware_engine_network=vmwareengine_resources.VmwareEngineNetwork( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_vmware_engine_networks( + vmwareengine.ListVmwareEngineNetworksRequest(), + parent="parent_value", ) -def test_update_vmware_engine_network_rest_error(): +def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="abc", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[], + next_page_token="def", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + ], + next_page_token="ghi", + ), + vmwareengine.ListVmwareEngineNetworksResponse( + vmware_engine_networks=[ + vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine_resources.VmwareEngineNetwork(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vmwareengine.ListVmwareEngineNetworksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_vmware_engine_networks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in results + ) + + pages = list(client.list_vmware_engine_networks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeleteVmwareEngineNetworkRequest, + vmwareengine.CreatePrivateConnectionRequest, dict, ], ) -def test_delete_vmware_engine_network_rest(request_type): +def 
test_create_private_connection_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["private_connection"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "state": 1, + "vmware_engine_network": "vmware_engine_network_value", + "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", + "type_": 1, + "peering_id": "peering_id_value", + "routing_mode": 1, + "uid": "uid_value", + "service_network": "service_network_value", + "peering_state": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vmwareengine.CreatePrivateConnectionRequest.meta.fields[ + "private_connection" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["private_connection"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["private_connection"][field])): + del 
request_init["private_connection"][field][i][subfield] + else: + del request_init["private_connection"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22154,19 +44502,20 @@ def test_delete_vmware_engine_network_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_vmware_engine_network(request) + response = client.create_private_connection(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_vmware_engine_network_rest_required_fields( - request_type=vmwareengine.DeleteVmwareEngineNetworkRequest, +def test_create_private_connection_rest_required_fields( + request_type=vmwareengine.CreatePrivateConnectionRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["private_connection_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22178,31 +44527,40 @@ def test_delete_vmware_engine_network_rest_required_fields( ) # verify fields with default values are dropped + assert "privateConnectionId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).create_private_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "privateConnectionId" in jsonified_request + assert ( + jsonified_request["privateConnectionId"] + == request_init["private_connection_id"] + ) - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["privateConnectionId"] = 
"private_connection_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).create_private_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "etag", + "private_connection_id", "request_id", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "privateConnectionId" in jsonified_request + assert jsonified_request["privateConnectionId"] == "private_connection_id_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22223,9 +44581,10 @@ def test_delete_vmware_engine_network_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -22235,32 +44594,44 @@ def test_delete_vmware_engine_network_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_vmware_engine_network(request) + response = client.create_private_connection(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "privateConnectionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_vmware_engine_network_rest_unset_required_fields(): +def test_create_private_connection_rest_unset_required_fields(): transport = 
transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_vmware_engine_network._get_unset_required_fields({}) + unset_fields = transport.create_private_connection._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "etag", + "privateConnectionId", "requestId", ) ) - & set(("name",)) + & set( + ( + "parent", + "privateConnectionId", + "privateConnection", + ) + ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_vmware_engine_network_rest_interceptors(null_interceptor): +def test_create_private_connection_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22275,14 +44646,14 @@ def test_delete_vmware_engine_network_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_delete_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "post_create_private_connection" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_delete_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "pre_create_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.DeleteVmwareEngineNetworkRequest.pb( - vmwareengine.DeleteVmwareEngineNetworkRequest() + pb_message = vmwareengine.CreatePrivateConnectionRequest.pb( + vmwareengine.CreatePrivateConnectionRequest() ) transcode.return_value = { "method": "post", @@ -22298,7 +44669,7 @@ def test_delete_vmware_engine_network_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.DeleteVmwareEngineNetworkRequest() + request = vmwareengine.CreatePrivateConnectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22306,7 +44677,7 @@ def 
test_delete_vmware_engine_network_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_vmware_engine_network( + client.create_private_connection( request, metadata=[ ("key", "val"), @@ -22318,8 +44689,8 @@ def test_delete_vmware_engine_network_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_vmware_engine_network_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.DeleteVmwareEngineNetworkRequest +def test_create_private_connection_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.CreatePrivateConnectionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22327,9 +44698,7 @@ def test_delete_vmware_engine_network_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22341,10 +44710,10 @@ def test_delete_vmware_engine_network_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_vmware_engine_network(request) + client.create_private_connection(request) -def test_delete_vmware_engine_network_rest_flattened(): +def test_create_private_connection_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22356,13 +44725,15 @@ def test_delete_vmware_engine_network_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", ) mock_args.update(sample_request) @@ -22373,20 +44744,20 @@ def test_delete_vmware_engine_network_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_vmware_engine_network(**mock_args) + client.create_private_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}" + "%s/v1/{parent=projects/*/locations/*}/privateConnections" % client.transport._host, args[1], ) -def test_delete_vmware_engine_network_rest_flattened_error(transport: str = "rest"): +def test_create_private_connection_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22395,13 +44766,17 @@ def test_delete_vmware_engine_network_rest_flattened_error(transport: str = "res # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_vmware_engine_network( - vmwareengine.DeleteVmwareEngineNetworkRequest(), - name="name_value", + client.create_private_connection( + vmwareengine.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=vmwareengine_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", ) -def test_delete_vmware_engine_network_rest_error(): +def test_create_private_connection_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22410,11 +44785,11 @@ def test_delete_vmware_engine_network_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetVmwareEngineNetworkRequest, + vmwareengine.GetPrivateConnectionRequest, dict, ], ) -def test_get_vmware_engine_network_rest(request_type): +def test_get_private_connection_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22422,45 +44797,67 @@ def test_get_vmware_engine_network_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" + "name": 
"projects/sample1/locations/sample2/privateConnections/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.VmwareEngineNetwork( + return_value = vmwareengine_resources.PrivateConnection( name="name_value", description="description_value", - state=vmwareengine_resources.VmwareEngineNetwork.State.CREATING, - type_=vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY, + state=vmwareengine_resources.PrivateConnection.State.CREATING, + vmware_engine_network="vmware_engine_network_value", + vmware_engine_network_canonical="vmware_engine_network_canonical_value", + type_=vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS, + peering_id="peering_id_value", + routing_mode=vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL, uid="uid_value", - etag="etag_value", + service_network="service_network_value", + peering_state=vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) + return_value = vmwareengine_resources.PrivateConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_vmware_engine_network(request) + response = client.get_private_connection(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.VmwareEngineNetwork) + assert isinstance(response, vmwareengine_resources.PrivateConnection) assert response.name == "name_value" assert response.description == "description_value" - assert response.state == vmwareengine_resources.VmwareEngineNetwork.State.CREATING - assert response.type_ == vmwareengine_resources.VmwareEngineNetwork.Type.LEGACY + assert response.state == vmwareengine_resources.PrivateConnection.State.CREATING + assert response.vmware_engine_network == "vmware_engine_network_value" + assert ( + response.vmware_engine_network_canonical + == "vmware_engine_network_canonical_value" + ) + assert ( + response.type_ + == vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS + ) + assert response.peering_id == "peering_id_value" + assert ( + response.routing_mode + == vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL + ) assert response.uid == "uid_value" - assert response.etag == "etag_value" + assert response.service_network == "service_network_value" + assert ( + response.peering_state + == vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE + ) -def test_get_vmware_engine_network_rest_required_fields( - request_type=vmwareengine.GetVmwareEngineNetworkRequest, +def test_get_private_connection_rest_required_fields( + request_type=vmwareengine.GetPrivateConnectionRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -22480,7 +44877,7 @@ def test_get_vmware_engine_network_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).get_private_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -22489,7 +44886,7 @@ def test_get_vmware_engine_network_rest_required_fields( unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).get_vmware_engine_network._get_unset_required_fields(jsonified_request) + ).get_private_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -22503,7 +44900,7 @@ def test_get_vmware_engine_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.VmwareEngineNetwork() + return_value = vmwareengine_resources.PrivateConnection() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22524,30 +44921,30 @@ def test_get_vmware_engine_network_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) + return_value = vmwareengine_resources.PrivateConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_vmware_engine_network(request) + response = client.get_private_connection(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_vmware_engine_network_rest_unset_required_fields(): +def test_get_private_connection_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_vmware_engine_network._get_unset_required_fields({}) + unset_fields = transport.get_private_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, 
False]) -def test_get_vmware_engine_network_rest_interceptors(null_interceptor): +def test_get_private_connection_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22560,14 +44957,14 @@ def test_get_vmware_engine_network_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "post_get_private_connection" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_vmware_engine_network" + transports.VmwareEngineRestInterceptor, "pre_get_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetVmwareEngineNetworkRequest.pb( - vmwareengine.GetVmwareEngineNetworkRequest() + pb_message = vmwareengine.GetPrivateConnectionRequest.pb( + vmwareengine.GetPrivateConnectionRequest() ) transcode.return_value = { "method": "post", @@ -22579,19 +44976,19 @@ def test_get_vmware_engine_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.VmwareEngineNetwork.to_json( - vmwareengine_resources.VmwareEngineNetwork() + req.return_value._content = vmwareengine_resources.PrivateConnection.to_json( + vmwareengine_resources.PrivateConnection() ) - request = vmwareengine.GetVmwareEngineNetworkRequest() + request = vmwareengine.GetPrivateConnectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.VmwareEngineNetwork() + post.return_value = vmwareengine_resources.PrivateConnection() - client.get_vmware_engine_network( + client.get_private_connection( request, metadata=[ ("key", 
"val"), @@ -22603,8 +45000,8 @@ def test_get_vmware_engine_network_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_vmware_engine_network_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetVmwareEngineNetworkRequest +def test_get_private_connection_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetPrivateConnectionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22613,7 +45010,7 @@ def test_get_vmware_engine_network_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" + "name": "projects/sample1/locations/sample2/privateConnections/sample3" } request = request_type(**request_init) @@ -22626,10 +45023,10 @@ def test_get_vmware_engine_network_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_vmware_engine_network(request) + client.get_private_connection(request) -def test_get_vmware_engine_network_rest_flattened(): +def test_get_private_connection_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22638,11 +45035,11 @@ def test_get_vmware_engine_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.VmwareEngineNetwork() + return_value = vmwareengine_resources.PrivateConnection() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/vmwareEngineNetworks/sample3" + "name": "projects/sample1/locations/sample2/privateConnections/sample3" } # get truthy value for each flattened field @@ -22655,25 +45052,25 @@ def test_get_vmware_engine_network_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine_resources.VmwareEngineNetwork.pb(return_value) + return_value = vmwareengine_resources.PrivateConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_vmware_engine_network(**mock_args) + client.get_private_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/vmwareEngineNetworks/*}" + "%s/v1/{name=projects/*/locations/*/privateConnections/*}" % client.transport._host, args[1], ) -def test_get_vmware_engine_network_rest_flattened_error(transport: str = "rest"): +def test_get_private_connection_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22682,13 +45079,13 @@ def test_get_vmware_engine_network_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_vmware_engine_network( - vmwareengine.GetVmwareEngineNetworkRequest(), + client.get_private_connection( + vmwareengine.GetPrivateConnectionRequest(), name="name_value", ) -def test_get_vmware_engine_network_rest_error(): +def test_get_private_connection_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22697,11 +45094,11 @@ def test_get_vmware_engine_network_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListVmwareEngineNetworksRequest, + vmwareengine.ListPrivateConnectionsRequest, dict, ], ) -def test_list_vmware_engine_networks_rest(request_type): +def test_list_private_connections_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22714,7 +45111,7 @@ def test_list_vmware_engine_networks_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListVmwareEngineNetworksResponse( + return_value = vmwareengine.ListPrivateConnectionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -22723,21 +45120,21 @@ def test_list_vmware_engine_networks_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListVmwareEngineNetworksResponse.pb(return_value) + return_value = vmwareengine.ListPrivateConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_vmware_engine_networks(request) + response = client.list_private_connections(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVmwareEngineNetworksPager) + assert isinstance(response, pagers.ListPrivateConnectionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_vmware_engine_networks_rest_required_fields( - request_type=vmwareengine.ListVmwareEngineNetworksRequest, +def test_list_private_connections_rest_required_fields( + request_type=vmwareengine.ListPrivateConnectionsRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -22757,7 +45154,7 @@ def test_list_vmware_engine_networks_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_vmware_engine_networks._get_unset_required_fields(jsonified_request) + ).list_private_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -22766,7 +45163,7 @@ def test_list_vmware_engine_networks_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_vmware_engine_networks._get_unset_required_fields(jsonified_request) + ).list_private_connections._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -22789,7 +45186,7 @@ def test_list_vmware_engine_networks_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListVmwareEngineNetworksResponse() + return_value = vmwareengine.ListPrivateConnectionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22810,27 +45207,25 @@ def test_list_vmware_engine_networks_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListVmwareEngineNetworksResponse.pb( - return_value - ) + return_value = vmwareengine.ListPrivateConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_vmware_engine_networks(request) + response = client.list_private_connections(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_vmware_engine_networks_rest_unset_required_fields(): +def test_list_private_connections_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_vmware_engine_networks._get_unset_required_fields({}) + unset_fields = transport.list_private_connections._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -22845,7 +45240,7 @@ def test_list_vmware_engine_networks_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_vmware_engine_networks_rest_interceptors(null_interceptor): +def test_list_private_connections_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22858,14 +45253,14 @@ def test_list_vmware_engine_networks_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_vmware_engine_networks" + 
transports.VmwareEngineRestInterceptor, "post_list_private_connections" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_vmware_engine_networks" + transports.VmwareEngineRestInterceptor, "pre_list_private_connections" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListVmwareEngineNetworksRequest.pb( - vmwareengine.ListVmwareEngineNetworksRequest() + pb_message = vmwareengine.ListPrivateConnectionsRequest.pb( + vmwareengine.ListPrivateConnectionsRequest() ) transcode.return_value = { "method": "post", @@ -22877,21 +45272,19 @@ def test_list_vmware_engine_networks_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - vmwareengine.ListVmwareEngineNetworksResponse.to_json( - vmwareengine.ListVmwareEngineNetworksResponse() - ) + req.return_value._content = vmwareengine.ListPrivateConnectionsResponse.to_json( + vmwareengine.ListPrivateConnectionsResponse() ) - request = vmwareengine.ListVmwareEngineNetworksRequest() + request = vmwareengine.ListPrivateConnectionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListVmwareEngineNetworksResponse() + post.return_value = vmwareengine.ListPrivateConnectionsResponse() - client.list_vmware_engine_networks( + client.list_private_connections( request, metadata=[ ("key", "val"), @@ -22903,8 +45296,8 @@ def test_list_vmware_engine_networks_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_vmware_engine_networks_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListVmwareEngineNetworksRequest +def test_list_private_connections_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.ListPrivateConnectionsRequest ): client = VmwareEngineClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -22924,10 +45317,10 @@ def test_list_vmware_engine_networks_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_vmware_engine_networks(request) + client.list_private_connections(request) -def test_list_vmware_engine_networks_rest_flattened(): +def test_list_private_connections_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22936,7 +45329,7 @@ def test_list_vmware_engine_networks_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListVmwareEngineNetworksResponse() + return_value = vmwareengine.ListPrivateConnectionsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -22951,25 +45344,25 @@ def test_list_vmware_engine_networks_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListVmwareEngineNetworksResponse.pb(return_value) + return_value = vmwareengine.ListPrivateConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_vmware_engine_networks(**mock_args) + client.list_private_connections(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/vmwareEngineNetworks" + "%s/v1/{parent=projects/*/locations/*}/privateConnections" % client.transport._host, args[1], ) -def test_list_vmware_engine_networks_rest_flattened_error(transport: str = "rest"): +def test_list_private_connections_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22978,13 +45371,13 @@ def test_list_vmware_engine_networks_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_vmware_engine_networks( - vmwareengine.ListVmwareEngineNetworksRequest(), + client.list_private_connections( + vmwareengine.ListPrivateConnectionsRequest(), parent="parent_value", ) -def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): +def test_list_private_connections_rest_pager(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22996,28 +45389,28 @@ def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), ], next_page_token="abc", ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[], + 
vmwareengine.ListPrivateConnectionsResponse( + private_connections=[], next_page_token="def", ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), ], next_page_token="ghi", ), - vmwareengine.ListVmwareEngineNetworksResponse( - vmware_engine_networks=[ - vmwareengine_resources.VmwareEngineNetwork(), - vmwareengine_resources.VmwareEngineNetwork(), + vmwareengine.ListPrivateConnectionsResponse( + private_connections=[ + vmwareengine_resources.PrivateConnection(), + vmwareengine_resources.PrivateConnection(), ], ), ) @@ -23026,7 +45419,7 @@ def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - vmwareengine.ListVmwareEngineNetworksResponse.to_json(x) for x in response + vmwareengine.ListPrivateConnectionsResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -23036,15 +45429,15 @@ def test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_vmware_engine_networks(request=sample_request) + pager = client.list_private_connections(request=sample_request) results = list(pager) assert len(results) == 6 assert all( - isinstance(i, vmwareengine_resources.VmwareEngineNetwork) for i in results + isinstance(i, vmwareengine_resources.PrivateConnection) for i in results ) - pages = list(client.list_vmware_engine_networks(request=sample_request).pages) + pages = list(client.list_private_connections(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -23052,20 +45445,24 @@ def 
test_list_vmware_engine_networks_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - vmwareengine.CreatePrivateConnectionRequest, + vmwareengine.UpdatePrivateConnectionRequest, dict, ], ) -def test_create_private_connection_rest(request_type): +def test_update_private_connection_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "private_connection": { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + } request_init["private_connection"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/privateConnections/sample3", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "description": "description_value", @@ -23084,7 +45481,7 @@ def test_create_private_connection_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.CreatePrivateConnectionRequest.meta.fields[ + test_field = vmwareengine.UpdatePrivateConnectionRequest.meta.fields[ "private_connection" ] @@ -23162,20 +45559,18 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_private_connection(request) + response = client.update_private_connection(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_private_connection_rest_required_fields( - request_type=vmwareengine.CreatePrivateConnectionRequest, +def test_update_private_connection_rest_required_fields( + request_type=vmwareengine.UpdatePrivateConnectionRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" - request_init["private_connection_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -23187,40 +45582,27 @@ def test_create_private_connection_rest_required_fields( ) # verify fields with default values are dropped - assert "privateConnectionId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_private_connection._get_unset_required_fields(jsonified_request) + ).update_private_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "privateConnectionId" in jsonified_request - assert ( - jsonified_request["privateConnectionId"] - == request_init["private_connection_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request["privateConnectionId"] = "private_connection_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_private_connection._get_unset_required_fields(jsonified_request) + ).update_private_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "private_connection_id", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "privateConnectionId" in jsonified_request - assert jsonified_request["privateConnectionId"] == "private_connection_id_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23241,7 +45623,7 @@ def test_create_private_connection_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -23254,44 +45636,37 @@ def test_create_private_connection_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_private_connection(request) + response = client.update_private_connection(request) - expected_params = [ - ( - "privateConnectionId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_private_connection_rest_unset_required_fields(): +def test_update_private_connection_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_private_connection._get_unset_required_fields({}) + unset_fields = transport.update_private_connection._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "privateConnectionId", "requestId", + "updateMask", ) ) & set( ( - "parent", - "privateConnectionId", "privateConnection", + "updateMask", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_private_connection_rest_interceptors(null_interceptor): +def test_update_private_connection_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23306,14 +45681,14 @@ def test_create_private_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_create_private_connection" + transports.VmwareEngineRestInterceptor, "post_update_private_connection" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_create_private_connection" + transports.VmwareEngineRestInterceptor, "pre_update_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.CreatePrivateConnectionRequest.pb( - vmwareengine.CreatePrivateConnectionRequest() + pb_message = vmwareengine.UpdatePrivateConnectionRequest.pb( + vmwareengine.UpdatePrivateConnectionRequest() ) transcode.return_value = { "method": "post", @@ -23329,7 +45704,7 @@ def test_create_private_connection_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.CreatePrivateConnectionRequest() + request = vmwareengine.UpdatePrivateConnectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -23337,7 +45712,7 @@ def test_create_private_connection_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_private_connection( + client.update_private_connection( request, metadata=[ ("key", "val"), @@ -23349,8 +45724,8 @@ def test_create_private_connection_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_private_connection_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.CreatePrivateConnectionRequest +def 
test_update_private_connection_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.UpdatePrivateConnectionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23358,7 +45733,11 @@ def test_create_private_connection_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "private_connection": { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23370,10 +45749,10 @@ def test_create_private_connection_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_private_connection(request) + client.update_private_connection(request) -def test_create_private_connection_rest_flattened(): +def test_update_private_connection_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23385,15 +45764,18 @@ def test_create_private_connection_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "private_connection": { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", private_connection=vmwareengine_resources.PrivateConnection( name="name_value" ), - private_connection_id="private_connection_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -23404,20 +45786,20 @@ def test_create_private_connection_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value - client.create_private_connection(**mock_args) + client.update_private_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/privateConnections" + "%s/v1/{private_connection.name=projects/*/locations/*/privateConnections/*}" % client.transport._host, args[1], ) -def test_create_private_connection_rest_flattened_error(transport: str = "rest"): +def test_update_private_connection_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23426,17 +45808,16 @@ def test_create_private_connection_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_private_connection( - vmwareengine.CreatePrivateConnectionRequest(), - parent="parent_value", + client.update_private_connection( + vmwareengine.UpdatePrivateConnectionRequest(), private_connection=vmwareengine_resources.PrivateConnection( name="name_value" ), - private_connection_id="private_connection_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_private_connection_rest_error(): +def test_update_private_connection_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23445,11 +45826,11 @@ def test_create_private_connection_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.GetPrivateConnectionRequest, + vmwareengine.DeletePrivateConnectionRequest, dict, ], ) -def test_get_private_connection_rest(request_type): +def test_delete_private_connection_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest", @@ -23464,60 +45845,23 @@ def test_get_private_connection_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.PrivateConnection( - name="name_value", - description="description_value", - state=vmwareengine_resources.PrivateConnection.State.CREATING, - vmware_engine_network="vmware_engine_network_value", - vmware_engine_network_canonical="vmware_engine_network_canonical_value", - type_=vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS, - peering_id="peering_id_value", - routing_mode=vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL, - uid="uid_value", - service_network="service_network_value", - peering_state=vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.PrivateConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_private_connection(request) + response = client.delete_private_connection(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, vmwareengine_resources.PrivateConnection) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == vmwareengine_resources.PrivateConnection.State.CREATING - assert response.vmware_engine_network == "vmware_engine_network_value" - assert ( - response.vmware_engine_network_canonical - == "vmware_engine_network_canonical_value" - ) - assert ( - response.type_ - == vmwareengine_resources.PrivateConnection.Type.PRIVATE_SERVICE_ACCESS - ) - assert response.peering_id == "peering_id_value" - assert ( - response.routing_mode - == vmwareengine_resources.PrivateConnection.RoutingMode.GLOBAL - ) - assert response.uid == "uid_value" - assert response.service_network == "service_network_value" - assert ( - response.peering_state - == vmwareengine_resources.PrivateConnection.PeeringState.PEERING_ACTIVE - ) + assert response.operation.name == "operations/spam" -def test_get_private_connection_rest_required_fields( - request_type=vmwareengine.GetPrivateConnectionRequest, +def test_delete_private_connection_rest_required_fields( + request_type=vmwareengine.DeletePrivateConnectionRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -23537,7 +45881,7 @@ def test_get_private_connection_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_private_connection._get_unset_required_fields(jsonified_request) + ).delete_private_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23546,7 +45890,9 @@ def test_get_private_connection_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_private_connection._get_unset_required_fields(jsonified_request) + ).delete_private_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -23560,7 +45906,7 @@ def test_get_private_connection_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine_resources.PrivateConnection() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23572,39 +45918,36 @@ def test_get_private_connection_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine_resources.PrivateConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_private_connection(request) + response = client.delete_private_connection(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_private_connection_rest_unset_required_fields(): +def test_delete_private_connection_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_private_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_private_connection._get_unset_required_fields({}) + assert 
set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_private_connection_rest_interceptors(null_interceptor): +def test_delete_private_connection_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23617,14 +45960,16 @@ def test_get_private_connection_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_get_private_connection" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_delete_private_connection" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_get_private_connection" + transports.VmwareEngineRestInterceptor, "pre_delete_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.GetPrivateConnectionRequest.pb( - vmwareengine.GetPrivateConnectionRequest() + pb_message = vmwareengine.DeletePrivateConnectionRequest.pb( + vmwareengine.DeletePrivateConnectionRequest() ) transcode.return_value = { "method": "post", @@ -23636,19 +45981,19 @@ def test_get_private_connection_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine_resources.PrivateConnection.to_json( - vmwareengine_resources.PrivateConnection() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.GetPrivateConnectionRequest() + request = vmwareengine.DeletePrivateConnectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine_resources.PrivateConnection() + post.return_value = 
operations_pb2.Operation() - client.get_private_connection( + client.delete_private_connection( request, metadata=[ ("key", "val"), @@ -23660,8 +46005,8 @@ def test_get_private_connection_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_private_connection_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.GetPrivateConnectionRequest +def test_delete_private_connection_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.DeletePrivateConnectionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23683,10 +46028,10 @@ def test_get_private_connection_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_private_connection(request) + client.delete_private_connection(request) -def test_get_private_connection_rest_flattened(): +def test_delete_private_connection_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23695,7 +46040,7 @@ def test_get_private_connection_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine_resources.PrivateConnection() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -23711,13 +46056,11 @@ def test_get_private_connection_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine_resources.PrivateConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_private_connection(**mock_args) + client.delete_private_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -23730,7 +46073,7 @@ def test_get_private_connection_rest_flattened(): ) -def test_get_private_connection_rest_flattened_error(transport: str = "rest"): +def test_delete_private_connection_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -23739,13 +46082,13 @@ def test_get_private_connection_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_private_connection( - vmwareengine.GetPrivateConnectionRequest(), + client.delete_private_connection( + vmwareengine.DeletePrivateConnectionRequest(), name="name_value", ) -def test_get_private_connection_rest_error(): +def test_delete_private_connection_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23754,47 +46097,49 @@ def test_get_private_connection_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListPrivateConnectionsRequest, + vmwareengine.ListPrivateConnectionPeeringRoutesRequest, dict, ], ) -def test_list_private_connections_rest(request_type): +def test_list_private_connection_peering_routes_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListPrivateConnectionsResponse( + return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateConnectionsResponse.pb(return_value) + return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_private_connections(request) + response = client.list_private_connection_peering_routes(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert isinstance(response, pagers.ListPrivateConnectionPeeringRoutesPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - -def test_list_private_connections_rest_required_fields( - request_type=vmwareengine.ListPrivateConnectionsRequest, + +def test_list_private_connection_peering_routes_rest_required_fields( + request_type=vmwareengine.ListPrivateConnectionPeeringRoutesRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -23814,7 +46159,9 @@ def test_list_private_connections_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_private_connections._get_unset_required_fields(jsonified_request) + ).list_private_connection_peering_routes._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -23823,12 +46170,12 @@ def test_list_private_connections_rest_required_fields( unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).list_private_connections._get_unset_required_fields(jsonified_request) + ).list_private_connection_peering_routes._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", "page_size", "page_token", ) @@ -23846,7 +46193,7 @@ def test_list_private_connections_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListPrivateConnectionsResponse() + return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -23867,30 +46214,32 @@ def test_list_private_connections_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateConnectionsResponse.pb(return_value) + return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_private_connections(request) + response = client.list_private_connection_peering_routes(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_private_connections_rest_unset_required_fields(): +def test_list_private_connection_peering_routes_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_private_connections._get_unset_required_fields({}) + unset_fields = ( + 
transport.list_private_connection_peering_routes._get_unset_required_fields({}) + ) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", "pageSize", "pageToken", ) @@ -23900,7 +46249,7 @@ def test_list_private_connections_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_private_connections_rest_interceptors(null_interceptor): +def test_list_private_connection_peering_routes_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23913,14 +46262,16 @@ def test_list_private_connections_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_list_private_connections" + transports.VmwareEngineRestInterceptor, + "post_list_private_connection_peering_routes", ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_list_private_connections" + transports.VmwareEngineRestInterceptor, + "pre_list_private_connection_peering_routes", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListPrivateConnectionsRequest.pb( - vmwareengine.ListPrivateConnectionsRequest() + pb_message = vmwareengine.ListPrivateConnectionPeeringRoutesRequest.pb( + vmwareengine.ListPrivateConnectionPeeringRoutesRequest() ) transcode.return_value = { "method": "post", @@ -23932,19 +46283,21 @@ def test_list_private_connections_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = vmwareengine.ListPrivateConnectionsResponse.to_json( - vmwareengine.ListPrivateConnectionsResponse() + req.return_value._content = ( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse.to_json( + vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + ) ) - 
request = vmwareengine.ListPrivateConnectionsRequest() + request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListPrivateConnectionsResponse() + post.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() - client.list_private_connections( + client.list_private_connection_peering_routes( request, metadata=[ ("key", "val"), @@ -23956,8 +46309,9 @@ def test_list_private_connections_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_private_connections_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.ListPrivateConnectionsRequest +def test_list_private_connection_peering_routes_rest_bad_request( + transport: str = "rest", + request_type=vmwareengine.ListPrivateConnectionPeeringRoutesRequest, ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23965,7 +46319,9 @@ def test_list_private_connections_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23977,10 +46333,10 @@ def test_list_private_connections_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_private_connections(request) + client.list_private_connection_peering_routes(request) -def test_list_private_connections_rest_flattened(): +def test_list_private_connection_peering_routes_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -23989,10 +46345,12 @@ def test_list_private_connections_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListPrivateConnectionsResponse() + return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -24004,25 +46362,29 @@ def test_list_private_connections_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateConnectionsResponse.pb(return_value) + return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_private_connections(**mock_args) + client.list_private_connection_peering_routes(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/privateConnections" + "%s/v1/{parent=projects/*/locations/*/privateConnections/*}/peeringRoutes" % client.transport._host, args[1], ) -def test_list_private_connections_rest_flattened_error(transport: str = "rest"): +def test_list_private_connection_peering_routes_rest_flattened_error( + transport: str = "rest", +): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24031,13 +46393,13 @@ def test_list_private_connections_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_private_connections( - vmwareengine.ListPrivateConnectionsRequest(), + client.list_private_connection_peering_routes( + vmwareengine.ListPrivateConnectionPeeringRoutesRequest(), parent="parent_value", ) -def test_list_private_connections_rest_pager(transport: str = "rest"): +def test_list_private_connection_peering_routes_rest_pager(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24049,28 +46411,28 @@ def test_list_private_connections_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), ], next_page_token="abc", ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[], + 
vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[], next_page_token="def", ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), ], next_page_token="ghi", ), - vmwareengine.ListPrivateConnectionsResponse( - private_connections=[ - vmwareengine_resources.PrivateConnection(), - vmwareengine_resources.PrivateConnection(), + vmwareengine.ListPrivateConnectionPeeringRoutesResponse( + peering_routes=[ + vmwareengine_resources.PeeringRoute(), + vmwareengine_resources.PeeringRoute(), ], ), ) @@ -24079,7 +46441,8 @@ def test_list_private_connections_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - vmwareengine.ListPrivateConnectionsResponse.to_json(x) for x in response + vmwareengine.ListPrivateConnectionPeeringRoutesResponse.to_json(x) + for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -24087,17 +46450,19 @@ def test_list_private_connections_rest_pager(transport: str = "rest"): return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } - pager = client.list_private_connections(request=sample_request) + pager = client.list_private_connection_peering_routes(request=sample_request) results = list(pager) assert len(results) == 6 - assert all( - isinstance(i, vmwareengine_resources.PrivateConnection) for i in results - ) + assert all(isinstance(i, vmwareengine_resources.PeeringRoute) for i in results) - pages = list(client.list_private_connections(request=sample_request).pages) + pages = list( + 
client.list_private_connection_peering_routes(request=sample_request).pages + ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -24105,106 +46470,18 @@ def test_list_private_connections_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - vmwareengine.UpdatePrivateConnectionRequest, + vmwareengine.GrantDnsBindPermissionRequest, dict, ], ) -def test_update_private_connection_rest(request_type): +def test_grant_dns_bind_permission_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "private_connection": { - "name": "projects/sample1/locations/sample2/privateConnections/sample3" - } - } - request_init["private_connection"] = { - "name": "projects/sample1/locations/sample2/privateConnections/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "state": 1, - "vmware_engine_network": "vmware_engine_network_value", - "vmware_engine_network_canonical": "vmware_engine_network_canonical_value", - "type_": 1, - "peering_id": "peering_id_value", - "routing_mode": 1, - "uid": "uid_value", - "service_network": "service_network_value", - "peering_state": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vmwareengine.UpdatePrivateConnectionRequest.meta.fields[ - "private_connection" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["private_connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["private_connection"][field])): - del 
request_init["private_connection"][field][i][subfield] - else: - del request_init["private_connection"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/dnsBindPermission"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -24219,18 +46496,19 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_private_connection(request) + response = client.grant_dns_bind_permission(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_update_private_connection_rest_required_fields( - request_type=vmwareengine.UpdatePrivateConnectionRequest, +def test_grant_dns_bind_permission_rest_required_fields( + request_type=vmwareengine.GrantDnsBindPermissionRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24245,24 +46523,21 @@ def test_update_private_connection_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_private_connection._get_unset_required_fields(jsonified_request) + ).grant_dns_bind_permission._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_private_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + ).grant_dns_bind_permission._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24283,7 +46558,7 @@ def test_update_private_connection_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -24296,37 +46571,32 @@ def test_update_private_connection_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_private_connection(request) + response = client.grant_dns_bind_permission(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_private_connection_rest_unset_required_fields(): +def test_grant_dns_bind_permission_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_private_connection._get_unset_required_fields({}) + unset_fields = transport.grant_dns_bind_permission._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) + set(()) & set( ( - "privateConnection", - "updateMask", + "name", + "principal", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_private_connection_rest_interceptors(null_interceptor): +def test_grant_dns_bind_permission_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24341,14 +46611,14 @@ def test_update_private_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_update_private_connection" + transports.VmwareEngineRestInterceptor, "post_grant_dns_bind_permission" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_update_private_connection" + transports.VmwareEngineRestInterceptor, "pre_grant_dns_bind_permission" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.UpdatePrivateConnectionRequest.pb( - vmwareengine.UpdatePrivateConnectionRequest() + pb_message = vmwareengine.GrantDnsBindPermissionRequest.pb( + vmwareengine.GrantDnsBindPermissionRequest() ) transcode.return_value = { "method": "post", @@ -24364,7 +46634,7 @@ def test_update_private_connection_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = vmwareengine.UpdatePrivateConnectionRequest() + request = vmwareengine.GrantDnsBindPermissionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -24372,7 +46642,7 @@ def test_update_private_connection_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_private_connection( + client.grant_dns_bind_permission( request, metadata=[ ("key", "val"), @@ -24384,8 +46654,8 @@ def test_update_private_connection_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_private_connection_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.UpdatePrivateConnectionRequest +def test_grant_dns_bind_permission_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GrantDnsBindPermissionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-24393,11 +46663,7 @@ def test_update_private_connection_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "private_connection": { - "name": "projects/sample1/locations/sample2/privateConnections/sample3" - } - } + request_init = {"name": "projects/sample1/locations/sample2/dnsBindPermission"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24409,10 +46675,10 @@ def test_update_private_connection_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_private_connection(request) + client.grant_dns_bind_permission(request) -def test_update_private_connection_rest_flattened(): +def test_grant_dns_bind_permission_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24425,17 +46691,13 @@ def test_update_private_connection_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "private_connection": { - "name": "projects/sample1/locations/sample2/privateConnections/sample3" - } + "name": "projects/sample1/locations/sample2/dnsBindPermission" } # get truthy value for each flattened field mock_args = dict( - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), ) mock_args.update(sample_request) @@ -24446,20 +46708,20 @@ def test_update_private_connection_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_private_connection(**mock_args) + client.grant_dns_bind_permission(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{private_connection.name=projects/*/locations/*/privateConnections/*}" + "%s/v1/{name=projects/*/locations/*/dnsBindPermission}:grant" % client.transport._host, args[1], ) -def test_update_private_connection_rest_flattened_error(transport: str = "rest"): +def test_grant_dns_bind_permission_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24468,16 +46730,14 @@ def test_update_private_connection_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_private_connection( - vmwareengine.UpdatePrivateConnectionRequest(), - private_connection=vmwareengine_resources.PrivateConnection( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.grant_dns_bind_permission( + vmwareengine.GrantDnsBindPermissionRequest(), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), ) -def test_update_private_connection_rest_error(): +def test_grant_dns_bind_permission_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24486,42 +46746,45 @@ def test_update_private_connection_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.DeletePrivateConnectionRequest, + vmwareengine.GetDnsBindPermissionRequest, dict, ], ) -def test_delete_private_connection_rest(request_type): +def test_get_dns_bind_permission_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/privateConnections/sample3" - } + request_init = {"name": 
"projects/sample1/locations/sample2/dnsBindPermission"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.DnsBindPermission( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.DnsBindPermission.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_private_connection(request) + response = client.get_dns_bind_permission(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, vmwareengine_resources.DnsBindPermission) + assert response.name == "name_value" -def test_delete_private_connection_rest_required_fields( - request_type=vmwareengine.DeletePrivateConnectionRequest, +def test_get_dns_bind_permission_rest_required_fields( + request_type=vmwareengine.GetDnsBindPermissionRequest, ): transport_class = transports.VmwareEngineRestTransport @@ -24541,7 +46804,7 @@ def test_delete_private_connection_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_private_connection._get_unset_required_fields(jsonified_request) + ).get_dns_bind_permission._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -24550,9 +46813,7 @@ def test_delete_private_connection_rest_required_fields( unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).delete_private_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + ).get_dns_bind_permission._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -24566,7 +46827,7 @@ def test_delete_private_connection_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.DnsBindPermission() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24578,36 +46839,39 @@ def test_delete_private_connection_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vmwareengine_resources.DnsBindPermission.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_private_connection(request) + response = client.get_dns_bind_permission(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_private_connection_rest_unset_required_fields(): +def test_get_dns_bind_permission_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_private_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.get_dns_bind_permission._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_private_connection_rest_interceptors(null_interceptor): +def test_get_dns_bind_permission_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24620,16 +46884,14 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VmwareEngineRestInterceptor, "post_delete_private_connection" + transports.VmwareEngineRestInterceptor, "post_get_dns_bind_permission" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, "pre_delete_private_connection" + transports.VmwareEngineRestInterceptor, "pre_get_dns_bind_permission" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.DeletePrivateConnectionRequest.pb( - vmwareengine.DeletePrivateConnectionRequest() + pb_message = vmwareengine.GetDnsBindPermissionRequest.pb( + vmwareengine.GetDnsBindPermissionRequest() ) transcode.return_value = { "method": "post", @@ -24641,19 +46903,19 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = vmwareengine_resources.DnsBindPermission.to_json( + vmwareengine_resources.DnsBindPermission() ) - 
request = vmwareengine.DeletePrivateConnectionRequest() + request = vmwareengine.GetDnsBindPermissionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = vmwareengine_resources.DnsBindPermission() - client.delete_private_connection( + client.get_dns_bind_permission( request, metadata=[ ("key", "val"), @@ -24665,8 +46927,8 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_private_connection_rest_bad_request( - transport: str = "rest", request_type=vmwareengine.DeletePrivateConnectionRequest +def test_get_dns_bind_permission_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.GetDnsBindPermissionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24674,9 +46936,7 @@ def test_delete_private_connection_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/privateConnections/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/dnsBindPermission"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24688,10 +46948,10 @@ def test_delete_private_connection_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_private_connection(request) + client.get_dns_bind_permission(request) -def test_delete_private_connection_rest_flattened(): +def test_get_dns_bind_permission_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24700,11 +46960,11 @@ def test_delete_private_connection_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vmwareengine_resources.DnsBindPermission() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/privateConnections/sample3" + "name": "projects/sample1/locations/sample2/dnsBindPermission" } # get truthy value for each flattened field @@ -24716,24 +46976,26 @@ def test_delete_private_connection_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vmwareengine_resources.DnsBindPermission.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_private_connection(**mock_args) + client.get_dns_bind_permission(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/privateConnections/*}" + "%s/v1/{name=projects/*/locations/*/dnsBindPermission}" % client.transport._host, args[1], ) -def test_delete_private_connection_rest_flattened_error(transport: str = "rest"): +def test_get_dns_bind_permission_rest_flattened_error(transport: str = "rest"): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -24742,13 +47004,13 @@ def test_delete_private_connection_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_private_connection( - vmwareengine.DeletePrivateConnectionRequest(), + client.get_dns_bind_permission( + vmwareengine.GetDnsBindPermissionRequest(), name="name_value", ) -def test_delete_private_connection_rest_error(): +def test_get_dns_bind_permission_rest_error(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24757,54 +47019,45 @@ def test_delete_private_connection_rest_error(): @pytest.mark.parametrize( "request_type", [ - vmwareengine.ListPrivateConnectionPeeringRoutesRequest, + vmwareengine.RevokeDnsBindPermissionRequest, dict, ], ) -def test_list_private_connection_peering_routes_rest(request_type): +def test_revoke_dns_bind_permission_rest(request_type): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/privateConnections/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/dnsBindPermission"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_private_connection_peering_routes(request) + response = client.revoke_dns_bind_permission(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPrivateConnectionPeeringRoutesPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_private_connection_peering_routes_rest_required_fields( - request_type=vmwareengine.ListPrivateConnectionPeeringRoutesRequest, +def test_revoke_dns_bind_permission_rest_required_fields( + request_type=vmwareengine.RevokeDnsBindPermissionRequest, ): transport_class = transports.VmwareEngineRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -24819,32 +47072,21 @@ def test_list_private_connection_peering_routes_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_private_connection_peering_routes._get_unset_required_fields( - jsonified_request - ) + ).revoke_dns_bind_permission._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = 
"name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_private_connection_peering_routes._get_unset_required_fields( - jsonified_request - ) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).revoke_dns_bind_permission._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24853,7 +47095,7 @@ def test_list_private_connection_peering_routes_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -24865,51 +47107,45 @@ def test_list_private_connection_peering_routes_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_private_connection_peering_routes(request) + response = client.revoke_dns_bind_permission(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_private_connection_peering_routes_rest_unset_required_fields(): +def test_revoke_dns_bind_permission_rest_unset_required_fields(): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.list_private_connection_peering_routes._get_unset_required_fields({}) - ) + unset_fields = transport.revoke_dns_bind_permission._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "name", + "principal", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_private_connection_peering_routes_rest_interceptors(null_interceptor): +def test_revoke_dns_bind_permission_rest_interceptors(null_interceptor): transport = transports.VmwareEngineRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24922,16 
+47158,16 @@ def test_list_private_connection_peering_routes_rest_interceptors(null_intercept ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VmwareEngineRestInterceptor, - "post_list_private_connection_peering_routes", + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VmwareEngineRestInterceptor, "post_revoke_dns_bind_permission" ) as post, mock.patch.object( - transports.VmwareEngineRestInterceptor, - "pre_list_private_connection_peering_routes", + transports.VmwareEngineRestInterceptor, "pre_revoke_dns_bind_permission" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = vmwareengine.ListPrivateConnectionPeeringRoutesRequest.pb( - vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + pb_message = vmwareengine.RevokeDnsBindPermissionRequest.pb( + vmwareengine.RevokeDnsBindPermissionRequest() ) transcode.return_value = { "method": "post", @@ -24943,21 +47179,19 @@ def test_list_private_connection_peering_routes_rest_interceptors(null_intercept req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse.to_json( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse() - ) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = vmwareengine.ListPrivateConnectionPeeringRoutesRequest() + request = vmwareengine.RevokeDnsBindPermissionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + post.return_value = operations_pb2.Operation() - client.list_private_connection_peering_routes( + client.revoke_dns_bind_permission( request, metadata=[ ("key", "val"), @@ -24969,9 +47203,8 @@ def 
test_list_private_connection_peering_routes_rest_interceptors(null_intercept post.assert_called_once() -def test_list_private_connection_peering_routes_rest_bad_request( - transport: str = "rest", - request_type=vmwareengine.ListPrivateConnectionPeeringRoutesRequest, +def test_revoke_dns_bind_permission_rest_bad_request( + transport: str = "rest", request_type=vmwareengine.RevokeDnsBindPermissionRequest ): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24979,9 +47212,7 @@ def test_list_private_connection_peering_routes_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/privateConnections/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/dnsBindPermission"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -24993,10 +47224,10 @@ def test_list_private_connection_peering_routes_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_private_connection_peering_routes(request) + client.revoke_dns_bind_permission(request) -def test_list_private_connection_peering_routes_rest_flattened(): +def test_revoke_dns_bind_permission_rest_flattened(): client = VmwareEngineClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25005,126 +47236,60 @@ def test_list_private_connection_peering_routes_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + "name": "projects/sample1/locations/sample2/dnsBindPermission" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vmwareengine.ListPrivateConnectionPeeringRoutesResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_private_connection_peering_routes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/privateConnections/*}/peeringRoutes" - % client.transport._host, - args[1], - ) - - -def test_list_private_connection_peering_routes_rest_flattened_error( - transport: str = "rest", -): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_private_connection_peering_routes( - vmwareengine.ListPrivateConnectionPeeringRoutesRequest(), - parent="parent_value", - ) - - -def test_list_private_connection_peering_routes_rest_pager(transport: str = "rest"): - client = VmwareEngineClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="abc", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[], - next_page_token="def", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - ], - next_page_token="ghi", - ), - vmwareengine.ListPrivateConnectionPeeringRoutesResponse( - peering_routes=[ - vmwareengine_resources.PeeringRoute(), - vmwareengine_resources.PeeringRoute(), - ], - ), - ) - # Two responses for two calls - response = response + response + client.revoke_dns_bind_permission(**mock_args) - # Wrap the values into proper Response objs - response = tuple( - vmwareengine.ListPrivateConnectionPeeringRoutesResponse.to_json(x) - for x in response + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dnsBindPermission}:revoke" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = { - "parent": "projects/sample1/locations/sample2/privateConnections/sample3" - } - - pager = client.list_private_connection_peering_routes(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, vmwareengine_resources.PeeringRoute) for i in results) +def test_revoke_dns_bind_permission_rest_flattened_error(transport: str = "rest"): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pages = list( - client.list_private_connection_peering_routes(request=sample_request).pages + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.revoke_dns_bind_permission( + vmwareengine.RevokeDnsBindPermissionRequest(), + name="name_value", + principal=vmwareengine_resources.Principal(user="user_value"), ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + + +def test_revoke_dns_bind_permission_rest_error(): + client = VmwareEngineClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) def test_credentials_transport_error(): @@ -25277,15 +47442,41 @@ def test_vmware_engine_base_transport(): "create_cluster", "update_cluster", "delete_cluster", + "list_nodes", + "get_node", + "list_external_addresses", + "fetch_network_policy_external_addresses", + "get_external_address", + "create_external_address", + "update_external_address", + "delete_external_address", "list_subnets", "get_subnet", "update_subnet", + "list_external_access_rules", + "get_external_access_rule", + "create_external_access_rule", + "update_external_access_rule", + "delete_external_access_rule", + "list_logging_servers", + "get_logging_server", + "create_logging_server", + "update_logging_server", + "delete_logging_server", "list_node_types", "get_node_type", "show_nsx_credentials", "show_vcenter_credentials", "reset_nsx_credentials", "reset_vcenter_credentials", + "get_dns_forwarding", + "update_dns_forwarding", + "get_network_peering", + "list_network_peerings", + "create_network_peering", + "delete_network_peering", + "update_network_peering", + "list_peering_routes", "create_hcx_activation_key", "list_hcx_activation_keys", "get_hcx_activation_key", @@ -25294,6 +47485,12 @@ def test_vmware_engine_base_transport(): "create_network_policy", "update_network_policy", "delete_network_policy", + "list_management_dns_zone_bindings", + "get_management_dns_zone_binding", + "create_management_dns_zone_binding", + "update_management_dns_zone_binding", + "delete_management_dns_zone_binding", + 
"repair_management_dns_zone_binding", "create_vmware_engine_network", "update_vmware_engine_network", "delete_vmware_engine_network", @@ -25305,6 +47502,9 @@ def test_vmware_engine_base_transport(): "update_private_connection", "delete_private_connection", "list_private_connection_peering_routes", + "grant_dns_bind_permission", + "get_dns_bind_permission", + "revoke_dns_bind_permission", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -25623,6 +47823,30 @@ def test_vmware_engine_client_transport_session_collision(transport_name): session1 = client1.transport.delete_cluster._session session2 = client2.transport.delete_cluster._session assert session1 != session2 + session1 = client1.transport.list_nodes._session + session2 = client2.transport.list_nodes._session + assert session1 != session2 + session1 = client1.transport.get_node._session + session2 = client2.transport.get_node._session + assert session1 != session2 + session1 = client1.transport.list_external_addresses._session + session2 = client2.transport.list_external_addresses._session + assert session1 != session2 + session1 = client1.transport.fetch_network_policy_external_addresses._session + session2 = client2.transport.fetch_network_policy_external_addresses._session + assert session1 != session2 + session1 = client1.transport.get_external_address._session + session2 = client2.transport.get_external_address._session + assert session1 != session2 + session1 = client1.transport.create_external_address._session + session2 = client2.transport.create_external_address._session + assert session1 != session2 + session1 = client1.transport.update_external_address._session + session2 = client2.transport.update_external_address._session + assert session1 != session2 + session1 = client1.transport.delete_external_address._session + session2 = client2.transport.delete_external_address._session + assert session1 != session2 session1 = client1.transport.list_subnets._session session2 = 
client2.transport.list_subnets._session assert session1 != session2 @@ -25632,6 +47856,36 @@ def test_vmware_engine_client_transport_session_collision(transport_name): session1 = client1.transport.update_subnet._session session2 = client2.transport.update_subnet._session assert session1 != session2 + session1 = client1.transport.list_external_access_rules._session + session2 = client2.transport.list_external_access_rules._session + assert session1 != session2 + session1 = client1.transport.get_external_access_rule._session + session2 = client2.transport.get_external_access_rule._session + assert session1 != session2 + session1 = client1.transport.create_external_access_rule._session + session2 = client2.transport.create_external_access_rule._session + assert session1 != session2 + session1 = client1.transport.update_external_access_rule._session + session2 = client2.transport.update_external_access_rule._session + assert session1 != session2 + session1 = client1.transport.delete_external_access_rule._session + session2 = client2.transport.delete_external_access_rule._session + assert session1 != session2 + session1 = client1.transport.list_logging_servers._session + session2 = client2.transport.list_logging_servers._session + assert session1 != session2 + session1 = client1.transport.get_logging_server._session + session2 = client2.transport.get_logging_server._session + assert session1 != session2 + session1 = client1.transport.create_logging_server._session + session2 = client2.transport.create_logging_server._session + assert session1 != session2 + session1 = client1.transport.update_logging_server._session + session2 = client2.transport.update_logging_server._session + assert session1 != session2 + session1 = client1.transport.delete_logging_server._session + session2 = client2.transport.delete_logging_server._session + assert session1 != session2 session1 = client1.transport.list_node_types._session session2 = client2.transport.list_node_types._session assert 
session1 != session2 @@ -25650,6 +47904,30 @@ def test_vmware_engine_client_transport_session_collision(transport_name): session1 = client1.transport.reset_vcenter_credentials._session session2 = client2.transport.reset_vcenter_credentials._session assert session1 != session2 + session1 = client1.transport.get_dns_forwarding._session + session2 = client2.transport.get_dns_forwarding._session + assert session1 != session2 + session1 = client1.transport.update_dns_forwarding._session + session2 = client2.transport.update_dns_forwarding._session + assert session1 != session2 + session1 = client1.transport.get_network_peering._session + session2 = client2.transport.get_network_peering._session + assert session1 != session2 + session1 = client1.transport.list_network_peerings._session + session2 = client2.transport.list_network_peerings._session + assert session1 != session2 + session1 = client1.transport.create_network_peering._session + session2 = client2.transport.create_network_peering._session + assert session1 != session2 + session1 = client1.transport.delete_network_peering._session + session2 = client2.transport.delete_network_peering._session + assert session1 != session2 + session1 = client1.transport.update_network_peering._session + session2 = client2.transport.update_network_peering._session + assert session1 != session2 + session1 = client1.transport.list_peering_routes._session + session2 = client2.transport.list_peering_routes._session + assert session1 != session2 session1 = client1.transport.create_hcx_activation_key._session session2 = client2.transport.create_hcx_activation_key._session assert session1 != session2 @@ -25674,6 +47952,24 @@ def test_vmware_engine_client_transport_session_collision(transport_name): session1 = client1.transport.delete_network_policy._session session2 = client2.transport.delete_network_policy._session assert session1 != session2 + session1 = client1.transport.list_management_dns_zone_bindings._session + session2 = 
client2.transport.list_management_dns_zone_bindings._session + assert session1 != session2 + session1 = client1.transport.get_management_dns_zone_binding._session + session2 = client2.transport.get_management_dns_zone_binding._session + assert session1 != session2 + session1 = client1.transport.create_management_dns_zone_binding._session + session2 = client2.transport.create_management_dns_zone_binding._session + assert session1 != session2 + session1 = client1.transport.update_management_dns_zone_binding._session + session2 = client2.transport.update_management_dns_zone_binding._session + assert session1 != session2 + session1 = client1.transport.delete_management_dns_zone_binding._session + session2 = client2.transport.delete_management_dns_zone_binding._session + assert session1 != session2 + session1 = client1.transport.repair_management_dns_zone_binding._session + session2 = client2.transport.repair_management_dns_zone_binding._session + assert session1 != session2 session1 = client1.transport.create_vmware_engine_network._session session2 = client2.transport.create_vmware_engine_network._session assert session1 != session2 @@ -25707,6 +48003,15 @@ def test_vmware_engine_client_transport_session_collision(transport_name): session1 = client1.transport.list_private_connection_peering_routes._session session2 = client2.transport.list_private_connection_peering_routes._session assert session1 != session2 + session1 = client1.transport.grant_dns_bind_permission._session + session2 = client2.transport.grant_dns_bind_permission._session + assert session1 != session2 + session1 = client1.transport.get_dns_bind_permission._session + session2 = client2.transport.get_dns_bind_permission._session + assert session1 != session2 + session1 = client1.transport.revoke_dns_bind_permission._session + session2 = client2.transport.revoke_dns_bind_permission._session + assert session1 != session2 def test_vmware_engine_grpc_transport_channel(): @@ -25890,11 +48195,122 @@ def 
test_parse_cluster_path(): assert expected == actual -def test_hcx_activation_key_path(): +def test_dns_bind_permission_path(): project = "winkle" location = "nautilus" - private_cloud = "scallop" - hcx_activation_key = "abalone" + expected = "projects/{project}/locations/{location}/dnsBindPermission".format( + project=project, + location=location, + ) + actual = VmwareEngineClient.dns_bind_permission_path(project, location) + assert expected == actual + + +def test_parse_dns_bind_permission_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = VmwareEngineClient.dns_bind_permission_path(**expected) + + # Check that the path construction is reversible. + actual = VmwareEngineClient.parse_dns_bind_permission_path(path) + assert expected == actual + + +def test_dns_forwarding_path(): + project = "squid" + location = "clam" + private_cloud = "whelk" + expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/dnsForwarding".format( + project=project, + location=location, + private_cloud=private_cloud, + ) + actual = VmwareEngineClient.dns_forwarding_path(project, location, private_cloud) + assert expected == actual + + +def test_parse_dns_forwarding_path(): + expected = { + "project": "octopus", + "location": "oyster", + "private_cloud": "nudibranch", + } + path = VmwareEngineClient.dns_forwarding_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VmwareEngineClient.parse_dns_forwarding_path(path) + assert expected == actual + + +def test_external_access_rule_path(): + project = "cuttlefish" + location = "mussel" + network_policy = "winkle" + external_access_rule = "nautilus" + expected = "projects/{project}/locations/{location}/networkPolicies/{network_policy}/externalAccessRules/{external_access_rule}".format( + project=project, + location=location, + network_policy=network_policy, + external_access_rule=external_access_rule, + ) + actual = VmwareEngineClient.external_access_rule_path( + project, location, network_policy, external_access_rule + ) + assert expected == actual + + +def test_parse_external_access_rule_path(): + expected = { + "project": "scallop", + "location": "abalone", + "network_policy": "squid", + "external_access_rule": "clam", + } + path = VmwareEngineClient.external_access_rule_path(**expected) + + # Check that the path construction is reversible. + actual = VmwareEngineClient.parse_external_access_rule_path(path) + assert expected == actual + + +def test_external_address_path(): + project = "whelk" + location = "octopus" + private_cloud = "oyster" + external_address = "nudibranch" + expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/externalAddresses/{external_address}".format( + project=project, + location=location, + private_cloud=private_cloud, + external_address=external_address, + ) + actual = VmwareEngineClient.external_address_path( + project, location, private_cloud, external_address + ) + assert expected == actual + + +def test_parse_external_address_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "private_cloud": "winkle", + "external_address": "nautilus", + } + path = VmwareEngineClient.external_address_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VmwareEngineClient.parse_external_address_path(path) + assert expected == actual + + +def test_hcx_activation_key_path(): + project = "scallop" + location = "abalone" + private_cloud = "squid" + hcx_activation_key = "clam" expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/hcxActivationKeys/{hcx_activation_key}".format( project=project, location=location, @@ -25909,10 +48325,10 @@ def test_hcx_activation_key_path(): def test_parse_hcx_activation_key_path(): expected = { - "project": "squid", - "location": "clam", - "private_cloud": "whelk", - "hcx_activation_key": "octopus", + "project": "whelk", + "location": "octopus", + "private_cloud": "oyster", + "hcx_activation_key": "nudibranch", } path = VmwareEngineClient.hcx_activation_key_path(**expected) @@ -25921,9 +48337,71 @@ def test_parse_hcx_activation_key_path(): assert expected == actual +def test_logging_server_path(): + project = "cuttlefish" + location = "mussel" + private_cloud = "winkle" + logging_server = "nautilus" + expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/loggingServers/{logging_server}".format( + project=project, + location=location, + private_cloud=private_cloud, + logging_server=logging_server, + ) + actual = VmwareEngineClient.logging_server_path( + project, location, private_cloud, logging_server + ) + assert expected == actual + + +def test_parse_logging_server_path(): + expected = { + "project": "scallop", + "location": "abalone", + "private_cloud": "squid", + "logging_server": "clam", + } + path = VmwareEngineClient.logging_server_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VmwareEngineClient.parse_logging_server_path(path) + assert expected == actual + + +def test_management_dns_zone_binding_path(): + project = "whelk" + location = "octopus" + private_cloud = "oyster" + management_dns_zone_binding = "nudibranch" + expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/managementDnsZoneBindings/{management_dns_zone_binding}".format( + project=project, + location=location, + private_cloud=private_cloud, + management_dns_zone_binding=management_dns_zone_binding, + ) + actual = VmwareEngineClient.management_dns_zone_binding_path( + project, location, private_cloud, management_dns_zone_binding + ) + assert expected == actual + + +def test_parse_management_dns_zone_binding_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "private_cloud": "winkle", + "management_dns_zone_binding": "nautilus", + } + path = VmwareEngineClient.management_dns_zone_binding_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VmwareEngineClient.parse_management_dns_zone_binding_path(path) + assert expected == actual + + def test_network_path(): - project = "oyster" - network = "nudibranch" + project = "scallop" + network = "abalone" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -25934,8 +48412,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "cuttlefish", - "network": "mussel", + "project": "squid", + "network": "clam", } path = VmwareEngineClient.network_path(**expected) @@ -25944,6 +48422,32 @@ def test_parse_network_path(): assert expected == actual +def test_network_peering_path(): + project = "whelk" + location = "octopus" + network_peering = "oyster" + expected = "projects/{project}/locations/{location}/networkPeerings/{network_peering}".format( + project=project, + location=location, + network_peering=network_peering, + ) + actual = VmwareEngineClient.network_peering_path(project, location, network_peering) + assert expected == actual + + +def test_parse_network_peering_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "network_peering": "mussel", + } + path = VmwareEngineClient.network_peering_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VmwareEngineClient.parse_network_peering_path(path) + assert expected == actual + + def test_network_policy_path(): project = "winkle" location = "nautilus" @@ -25970,10 +48474,44 @@ def test_parse_network_policy_path(): assert expected == actual -def test_node_type_path(): +def test_node_path(): project = "whelk" location = "octopus" - node_type = "oyster" + private_cloud = "oyster" + cluster = "nudibranch" + node = "cuttlefish" + expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/clusters/{cluster}/nodes/{node}".format( + project=project, + location=location, + private_cloud=private_cloud, + cluster=cluster, + node=node, + ) + actual = VmwareEngineClient.node_path( + project, location, private_cloud, cluster, node + ) + assert expected == actual + + +def test_parse_node_path(): + expected = { + "project": "mussel", + "location": "winkle", + "private_cloud": "nautilus", + "cluster": "scallop", + "node": "abalone", + } + path = VmwareEngineClient.node_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VmwareEngineClient.parse_node_path(path) + assert expected == actual + + +def test_node_type_path(): + project = "squid" + location = "clam" + node_type = "whelk" expected = "projects/{project}/locations/{location}/nodeTypes/{node_type}".format( project=project, location=location, @@ -25985,9 +48523,9 @@ def test_node_type_path(): def test_parse_node_type_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "node_type": "mussel", + "project": "octopus", + "location": "oyster", + "node_type": "nudibranch", } path = VmwareEngineClient.node_type_path(**expected) @@ -25997,9 +48535,9 @@ def test_parse_node_type_path(): def test_private_cloud_path(): - project = "winkle" - location = "nautilus" - private_cloud = "scallop" + project = "cuttlefish" + location = "mussel" + private_cloud = "winkle" expected = ( "projects/{project}/locations/{location}/privateClouds/{private_cloud}".format( project=project, @@ -26013,9 +48551,9 @@ def test_private_cloud_path(): def test_parse_private_cloud_path(): expected = { - "project": "abalone", - "location": "squid", - "private_cloud": "clam", + "project": "nautilus", + "location": "scallop", + "private_cloud": "abalone", } path = VmwareEngineClient.private_cloud_path(**expected) @@ -26025,9 +48563,9 @@ def test_parse_private_cloud_path(): def test_private_connection_path(): - project = "whelk" - location = "octopus" - private_connection = "oyster" + project = "squid" + location = "clam" + private_connection = "whelk" expected = "projects/{project}/locations/{location}/privateConnections/{private_connection}".format( project=project, location=location, @@ -26041,9 +48579,9 @@ def test_private_connection_path(): def test_parse_private_connection_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "private_connection": "mussel", + "project": "octopus", + "location": "oyster", + "private_connection": "nudibranch", } path = VmwareEngineClient.private_connection_path(**expected) 
@@ -26053,10 +48591,10 @@ def test_parse_private_connection_path(): def test_subnet_path(): - project = "winkle" - location = "nautilus" - private_cloud = "scallop" - subnet = "abalone" + project = "cuttlefish" + location = "mussel" + private_cloud = "winkle" + subnet = "nautilus" expected = "projects/{project}/locations/{location}/privateClouds/{private_cloud}/subnets/{subnet}".format( project=project, location=location, @@ -26069,10 +48607,10 @@ def test_subnet_path(): def test_parse_subnet_path(): expected = { - "project": "squid", - "location": "clam", - "private_cloud": "whelk", - "subnet": "octopus", + "project": "scallop", + "location": "abalone", + "private_cloud": "squid", + "subnet": "clam", } path = VmwareEngineClient.subnet_path(**expected) @@ -26082,9 +48620,9 @@ def test_parse_subnet_path(): def test_vmware_engine_network_path(): - project = "oyster" - location = "nudibranch" - vmware_engine_network = "cuttlefish" + project = "whelk" + location = "octopus" + vmware_engine_network = "oyster" expected = "projects/{project}/locations/{location}/vmwareEngineNetworks/{vmware_engine_network}".format( project=project, location=location, @@ -26098,9 +48636,9 @@ def test_vmware_engine_network_path(): def test_parse_vmware_engine_network_path(): expected = { - "project": "mussel", - "location": "winkle", - "vmware_engine_network": "nautilus", + "project": "nudibranch", + "location": "cuttlefish", + "vmware_engine_network": "mussel", } path = VmwareEngineClient.vmware_engine_network_path(**expected) @@ -26110,7 +48648,7 @@ def test_parse_vmware_engine_network_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -26120,7 +48658,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nautilus", } path = 
VmwareEngineClient.common_billing_account_path(**expected) @@ -26130,7 +48668,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -26140,7 +48678,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "abalone", } path = VmwareEngineClient.common_folder_path(**expected) @@ -26150,7 +48688,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -26160,7 +48698,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "clam", } path = VmwareEngineClient.common_organization_path(**expected) @@ -26170,7 +48708,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -26180,7 +48718,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "octopus", } path = VmwareEngineClient.common_project_path(**expected) @@ -26190,8 +48728,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -26202,8 +48740,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "cuttlefish", + "location": "mussel", } path = VmwareEngineClient.common_location_path(**expected) diff --git a/packages/google-maps-fleetengine-delivery/CHANGELOG.md 
b/packages/google-maps-fleetengine-delivery/CHANGELOG.md index 4fc1d3d2cf62..264a955796f5 100644 --- a/packages/google-maps-fleetengine-delivery/CHANGELOG.md +++ b/packages/google-maps-fleetengine-delivery/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.1.2...google-maps-fleetengine-delivery-v0.1.3) (2023-12-13) + + +### Features + +* [google-maps-fleetengine-delivery] add trace_id to Fleet Engine headers ([#12120](https://github.com/googleapis/google-cloud-python/issues/12120)) ([94238c8](https://github.com/googleapis/google-cloud-python/commit/94238c863b1806cc50d65431c425ef90dbd6ced7)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-delivery-v0.1.1...google-maps-fleetengine-delivery-v0.1.2) (2023-12-07) diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py index cf99f3acb1ee..536d6648a6f0 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py index cf99f3acb1ee..536d6648a6f0 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/header.py b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/header.py index cbfbf97291aa..cd3f18095f68 100644 --- a/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/header.py +++ b/packages/google-maps-fleetengine-delivery/google/maps/fleetengine_delivery_v1/types/header.py @@ -70,6 +70,9 @@ class DeliveryRequestHeader(proto.Message): android_api_level (int): Android API level of the calling SDK, only applicable for the Android SDKs. Field value example: ``23``. + trace_id (str): + Optional ID that can be provided for logging + purposes in order to identify the request. 
""" class SdkType(proto.Enum): @@ -156,6 +159,10 @@ class Platform(proto.Enum): proto.INT32, number=11, ) + trace_id: str = proto.Field( + proto.STRING, + number=12, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json index 36e24ebfdd8b..3c2b8c8ff04b 100644 --- a/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json +++ b/packages/google-maps-fleetengine-delivery/samples/generated_samples/snippet_metadata_maps.fleetengine.delivery.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine-delivery", - "version": "0.1.2" + "version": "0.1.3" }, "snippets": [ { diff --git a/packages/google-maps-fleetengine/CHANGELOG.md b/packages/google-maps-fleetengine/CHANGELOG.md index 2eecc96f26c4..fe8911270359 100644 --- a/packages/google-maps-fleetengine/CHANGELOG.md +++ b/packages/google-maps-fleetengine/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-v0.1.3...google-maps-fleetengine-v0.1.4) (2024-01-12) + + +### Documentation + +* [google-maps-fleetengine] better comments on SearchVehicle fields ([#12186](https://github.com/googleapis/google-cloud-python/issues/12186)) ([9ef70f7](https://github.com/googleapis/google-cloud-python/commit/9ef70f7cfd9eaeaad4479bae02a77993b9c52b21)) + +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-v0.1.2...google-maps-fleetengine-v0.1.3) (2023-12-13) + + +### Features + +* [google-maps-fleetengine] add trace_id to Fleet Engine headers ([#12119](https://github.com/googleapis/google-cloud-python/issues/12119)) 
([f0b84e7](https://github.com/googleapis/google-cloud-python/commit/f0b84e76439884a3aa2fe9472aa3fce41c19e375)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-maps-fleetengine-v0.1.1...google-maps-fleetengine-v0.1.2) (2023-12-07) diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py b/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py index cf99f3acb1ee..286ce4ad5531 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py index cf99f3acb1ee..286ce4ad5531 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/header.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/header.py index e7742922e3bc..a5acb7536044 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/header.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/header.py @@ -70,6 +70,9 @@ class RequestHeader(proto.Message): android_api_level (int): Android API level of the calling SDK, only applicable for the Android SDKs. 
Field value example: ``23``. + trace_id (str): + Optional ID that can be provided for logging + purposes in order to identify the request. """ class SdkType(proto.Enum): @@ -156,6 +159,10 @@ class Platform(proto.Enum): proto.INT32, number=11, ) + trace_id: str = proto.Field( + proto.STRING, + number=12, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/vehicle_api.py b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/vehicle_api.py index 6c705233c53f..44986ea93199 100644 --- a/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/vehicle_api.py +++ b/packages/google-maps-fleetengine/google/maps/fleetengine_v1/types/vehicle_api.py @@ -350,14 +350,10 @@ class SearchVehiclesRequest(proto.Message): greater than or equal to one. The driver is not considered in the capacity value. trip_types (MutableSequence[google.maps.fleetengine_v1.types.TripType]): - Required. Represents the type of proposed trip. Eligible - vehicles are those that can support at least one of the - specified trip type. - - ``EXCLUSIVE`` and ``SHARED`` may not be included together. - ``SHARED`` is not supported when ``current_trips_present`` - is ``CURRENT_TRIPS_PRESENT_UNSPECIFIED``. - ``UNKNOWN_TRIP_TYPE`` is not allowed. + Required. Represents the type of proposed trip. Must include + exactly one type. ``UNKNOWN_TRIP_TYPE`` is not allowed. + Restricts the search to only those vehicles that can support + that trip type. maximum_staleness (google.protobuf.duration_pb2.Duration): Restricts the search to only those vehicles that have sent location updates to Fleet Engine @@ -438,26 +434,26 @@ class SearchVehiclesRequest(proto.Message): Required. Specifies the desired ordering criterion for results. include_back_to_back (bool): - Indicates if a vehicle with a single active trip is eligible - for another match. If ``false``, vehicles with assigned - trips are excluded from the search results. 
If ``true``, - search results include vehicles with ``TripStatus`` of - ``ENROUTE_TO_DROPOFF``. - - This field is only considered if a single ``trip_type`` of - ``EXCLUSIVE`` is specified. + This indicates if vehicles with a single active trip are + eligible for this search. This field is only used when + ``current_trips_present`` is unspecified. When + ``current_trips_present`` is unspecified and this field is + ``false``, vehicles with assigned trips are excluded from + the search results. When ``current_trips_present`` is + unspecified and this field is ``true``, search results can + include vehicles with one active trip that has a status of + ``ENROUTE_TO_DROPOFF``. When ``current_trips_present`` is + specified, this field cannot be set to true. The default value is ``false``. trip_id (str): Indicates the trip associated with this ``SearchVehicleRequest``. current_trips_present (google.maps.fleetengine_v1.types.SearchVehiclesRequest.CurrentTripsPresent): - Restricts vehicles from appearing in the search results - based on their current trips. - - When current_trips_present is ``NONE`` or ``ANY``, - ``trip_types`` can be either ``EXCLUSIVE`` or ``SHARED``, - but not both. + This indicates if vehicles with active trips are eligible + for this search. This must be set to something other than + ``CURRENT_TRIPS_PRESENT_UNSPECIFIED`` if ``trip_type`` + includes ``SHARED``. filter (str): Optional. A filter query to apply when searching vehicles. See http://aip.dev/160 for examples of the filter syntax. @@ -531,18 +527,16 @@ class CurrentTripsPresent(proto.Enum): Values: CURRENT_TRIPS_PRESENT_UNSPECIFIED (0): - Only vehicles without trips can appear in search results. A - validation exception is thrown if ``include_back_to_back`` - is true. See the ``include_back_to_back`` flag for more - details. + The availability of vehicles with trips present is governed + by the ``include_back_to_back`` field. NONE (1): - Vehicles without trips can appear in search results. 
A - validation exception is thrown if ``include_back_to_back`` - is true. + Vehicles without trips can appear in search results. When + this value is used, ``include_back_to_back`` cannot be + ``true``. ANY (2): Vehicles with at most 5 current trips and 10 waypoints are - included in the search results. A validation exception is - thrown if ``include_back_to_back`` is true. + included in the search results. When this value is used, + ``include_back_to_back`` cannot be ``true``. """ CURRENT_TRIPS_PRESENT_UNSPECIFIED = 0 NONE = 1 diff --git a/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json b/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json index c84070eacb83..39db55680b03 100644 --- a/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json +++ b/packages/google-maps-fleetengine/samples/generated_samples/snippet_metadata_maps.fleetengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-fleetengine", - "version": "0.1.2" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-maps-places/CHANGELOG.md b/packages/google-maps-places/CHANGELOG.md index f7bf9b4937b3..1f07bfd90652 100644 --- a/packages/google-maps-places/CHANGELOG.md +++ b/packages/google-maps-places/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.5...google-maps-places-v0.1.6) (2023-12-07) + + +### Features + +* Add new primary type fields ([a74938f](https://github.com/googleapis/google-cloud-python/commit/a74938fa2ed19348d703d23ffb13545423e8b736)) +* Add new short formatted address field ([a74938f](https://github.com/googleapis/google-cloud-python/commit/a74938fa2ed19348d703d23ffb13545423e8b736)) +* Add new wheelchair accessibility fields 
([a74938f](https://github.com/googleapis/google-cloud-python/commit/a74938fa2ed19348d703d23ffb13545423e8b736)) + + +### Documentation + +* Change comments for some fields in Places API ([a74938f](https://github.com/googleapis/google-cloud-python/commit/a74938fa2ed19348d703d23ffb13545423e8b736)) + ## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-maps-places-v0.1.4...google-maps-places-v0.1.5) (2023-12-07) diff --git a/packages/google-maps-places/google/maps/places/gapic_version.py b/packages/google-maps-places/google/maps/places/gapic_version.py index ae12f4dfd596..60402a4567d1 100644 --- a/packages/google-maps-places/google/maps/places/gapic_version.py +++ b/packages/google-maps-places/google/maps/places/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/gapic_version.py b/packages/google-maps-places/google/maps/places_v1/gapic_version.py index ae12f4dfd596..60402a4567d1 100644 --- a/packages/google-maps-places/google/maps/places_v1/gapic_version.py +++ b/packages/google-maps-places/google/maps/places_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py index b5b8ae1fe65f..a66be5af3576 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/async_client.py @@ -62,7 +62,7 @@ class PlacesAsyncClient: """Service definition for the Places API. Note: every request actually - requires a field mask set outside of the request proto (all/'*' is + requires a field mask set outside of the request proto (all/'*', is not assumed). That can be set via either a side channel (SystemParameterContext) over RPC, or a header (X-Goog-FieldMask) over HTTP. See: https://cloud.google.com/apis/docs/system-parameters @@ -412,9 +412,15 @@ async def sample_get_photo_media(): The request object. Request for fetching a photo of a place using a photo resource name. name (:class:`str`): - Required. The resource name of a photo. It is returned - in Place's photos.name field. Format: - places//photos//media. + Required. The resource name of a photo media in the + format: + ``places/{place_id}/photos/{photo_reference}/media``. + + The resource name of a photo as returned in a Place + object's ``photos.name`` field comes with the format + ``places/{place_id}/photos/{photo_reference}``. You need + to append ``/media`` at the end of the photo resource to + get the photo media resource name. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -480,7 +486,7 @@ async def get_place( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> place.Place: - r"""Get a Place with a place id (in a name) string. + r"""Get place details with a place id (in a name) string. .. 
code-block:: python @@ -513,9 +519,9 @@ async def sample_get_place(): The request object. Request for fetching a Place with a place id (in a name) string. name (:class:`str`): - Required. A place_id returned in a Place (with "places/" + Required. A place ID returned in a Place (with "places/" prefix), or equivalently the name in the same Place. - Format: places/. + Format: ``places/{place_id}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/client.py b/packages/google-maps-places/google/maps/places_v1/services/places/client.py index eb8f13584a99..09ae45dab4b3 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/client.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/client.py @@ -102,7 +102,7 @@ def get_transport_class( class PlacesClient(metaclass=PlacesClientMeta): """Service definition for the Places API. Note: every request actually - requires a field mask set outside of the request proto (all/'*' is + requires a field mask set outside of the request proto (all/'*', is not assumed). That can be set via either a side channel (SystemParameterContext) over RPC, or a header (X-Goog-FieldMask) over HTTP. See: https://cloud.google.com/apis/docs/system-parameters @@ -691,9 +691,15 @@ def sample_get_photo_media(): The request object. Request for fetching a photo of a place using a photo resource name. name (str): - Required. The resource name of a photo. It is returned - in Place's photos.name field. Format: - places//photos//media. + Required. The resource name of a photo media in the + format: + ``places/{place_id}/photos/{photo_reference}/media``. + + The resource name of a photo as returned in a Place + object's ``photos.name`` field comes with the format + ``places/{place_id}/photos/{photo_reference}``. 
You need + to append ``/media`` at the end of the photo resource to + get the photo media resource name. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -759,7 +765,7 @@ def get_place( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> place.Place: - r"""Get a Place with a place id (in a name) string. + r"""Get place details with a place id (in a name) string. .. code-block:: python @@ -792,9 +798,9 @@ def sample_get_place(): The request object. Request for fetching a Place with a place id (in a name) string. name (str): - Required. A place_id returned in a Place (with "places/" + Required. A place ID returned in a Place (with "places/" prefix), or equivalently the name in the same Place. - Format: places/. + Format: ``places/{place_id}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py index dc682544c43b..6b8dc0ff836a 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc.py @@ -31,7 +31,7 @@ class PlacesGrpcTransport(PlacesTransport): """gRPC backend transport for Places. Service definition for the Places API. Note: every request actually - requires a field mask set outside of the request proto (all/'*' is + requires a field mask set outside of the request proto (all/'*', is not assumed). That can be set via either a side channel (SystemParameterContext) over RPC, or a header (X-Goog-FieldMask) over HTTP. 
See: https://cloud.google.com/apis/docs/system-parameters @@ -318,7 +318,7 @@ def get_photo_media( def get_place(self) -> Callable[[places_service.GetPlaceRequest], place.Place]: r"""Return a callable for the get place method over gRPC. - Get a Place with a place id (in a name) string. + Get place details with a place id (in a name) string. Returns: Callable[[~.GetPlaceRequest], diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py index 01c02171ec1e..c973a4e8bd05 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/grpc_asyncio.py @@ -32,7 +32,7 @@ class PlacesGrpcAsyncIOTransport(PlacesTransport): """gRPC AsyncIO backend transport for Places. Service definition for the Places API. Note: every request actually - requires a field mask set outside of the request proto (all/'*' is + requires a field mask set outside of the request proto (all/'*', is not assumed). That can be set via either a side channel (SystemParameterContext) over RPC, or a header (X-Goog-FieldMask) over HTTP. See: https://cloud.google.com/apis/docs/system-parameters @@ -326,7 +326,7 @@ def get_place( ) -> Callable[[places_service.GetPlaceRequest], Awaitable[place.Place]]: r"""Return a callable for the get place method over gRPC. - Get a Place with a place id (in a name) string. + Get place details with a place id (in a name) string. 
Returns: Callable[[~.GetPlaceRequest], diff --git a/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py b/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py index 6d4ac27db97f..d37f84946392 100644 --- a/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py +++ b/packages/google-maps-places/google/maps/places_v1/services/places/transports/rest.py @@ -203,7 +203,7 @@ class PlacesRestTransport(PlacesTransport): """REST backend transport for Places. Service definition for the Places API. Note: every request actually - requires a field mask set outside of the request proto (all/'*' is + requires a field mask set outside of the request proto (all/'*', is not assumed). That can be set via either a side channel (SystemParameterContext) over RPC, or a header (X-Goog-FieldMask) over HTTP. See: https://cloud.google.com/apis/docs/system-parameters diff --git a/packages/google-maps-places/google/maps/places_v1/types/attribution.py b/packages/google-maps-places/google/maps/places_v1/types/attribution.py index 6b56f07c539b..1ee0160fe4f0 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/attribution.py +++ b/packages/google-maps-places/google/maps/places_v1/types/attribution.py @@ -34,15 +34,15 @@ class AuthorAttribution(proto.Message): Attributes: display_name (str): - Output only. Name of the author of the + Name of the author of the [Photo][google.maps.places.v1.Photo] or [Review][google.maps.places.v1.Review]. uri (str): - Output only. URI of the author of the + URI of the author of the [Photo][google.maps.places.v1.Photo] or [Review][google.maps.places.v1.Review]. photo_uri (str): - Output only. Profile photo URI of the author of the + Profile photo URI of the author of the [Photo][google.maps.places.v1.Photo] or [Review][google.maps.places.v1.Review]. 
""" diff --git a/packages/google-maps-places/google/maps/places_v1/types/photo.py b/packages/google-maps-places/google/maps/places_v1/types/photo.py index 34e23e4a44e9..ea9aa9c2c6de 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/photo.py +++ b/packages/google-maps-places/google/maps/places_v1/types/photo.py @@ -34,17 +34,16 @@ class Photo(proto.Message): Attributes: name (str): - Output only. A reference representing this place photo which - may be used to look up this place photo again (a.k.a. the - API "resource" name: places/{place_id}/photos/{photo}). + Identifier. A reference representing this place photo which + may be used to look up this place photo again (also called + the API "resource" name: + ``places/{place_id}/photos/{photo}``). width_px (int): - Output only. The maximum available width, in - pixels. + The maximum available width, in pixels. height_px (int): - Output only. The maximum available height, in - pixels. + The maximum available height, in pixels. author_attributions (MutableSequence[google.maps.places_v1.types.AuthorAttribution]): - Output only. This photo's authors. + This photo's authors. """ name: str = proto.Field( diff --git a/packages/google-maps-places/google/maps/places_v1/types/place.py b/packages/google-maps-places/google/maps/places_v1/types/place.py index b5cec0c4975f..c85b3a3bd271 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/place.py +++ b/packages/google-maps-places/google/maps/places_v1/types/place.py @@ -68,249 +68,270 @@ class Place(proto.Message): Attributes: name (str): - Output only. An ID representing this place which may be used - to look up this place again (a.k.a. the API "resource" name: - places/). + An ID representing this place which may be used to look up + this place again (a.k.a. the API "resource" name: + places/place_id). id (str): - Output only. The unique identifier of a - place. + The unique identifier of a place. 
display_name (google.type.localized_text_pb2.LocalizedText): - Output only. The localized name of the place, - suitable as a short human-readable description. - For example, "Google Sydney", "Starbucks", - "Pyrmont", etc. + The localized name of the place, suitable as + a short human-readable description. For example, + "Google Sydney", "Starbucks", "Pyrmont", etc. types (MutableSequence[str]): - Output only. A set of type tags for this - result. For example, "political" and "locality". + A set of type tags for this result. For + example, "political" and "locality". For the + complete list of possible values, see Table A + and Table B at + https://developers.google.com/maps/documentation/places/web-service/place-types + primary_type (str): + The primary type of the given result. This + type must one of the Places API supported types. + For example, "restaurant", "cafe", "airport", + etc. A place can only have a single primary + type. For the complete list of possible values, + see Table A and Table B at + https://developers.google.com/maps/documentation/places/web-service/place-types + primary_type_display_name (google.type.localized_text_pb2.LocalizedText): + The display name of the primary type, + localized to the request language if applicable. + For the complete list of possible values, see + Table A and Table B at + https://developers.google.com/maps/documentation/places/web-service/place-types national_phone_number (str): - Output only. A human-readable phone number - for the place, in national format. + A human-readable phone number for the place, + in national format. international_phone_number (str): - Output only. A human-readable phone number - for the place, in international format. + A human-readable phone number for the place, + in international format. formatted_address (str): - Output only. A full, human-readable address - for this place. + A full, human-readable address for this + place. 
+ short_formatted_address (str): + A short, human-readable address for this + place. address_components (MutableSequence[google.maps.places_v1.types.Place.AddressComponent]): - Output only. Repeated components for each - locality level. + Repeated components for each locality level. Note the + following facts about the address_components[] array: + + - The array of address components may contain more + components than the formatted_address. + - The array does not necessarily include all the political + entities that contain an address, apart from those + included in the formatted_address. To retrieve all the + political entities that contain a specific address, you + should use reverse geocoding, passing the + latitude/longitude of the address as a parameter to the + request. + - The format of the response is not guaranteed to remain + the same between requests. In particular, the number of + address_components varies based on the address requested + and can change over time for the same address. A + component can change position in the array. The type of + the component can change. A particular component may be + missing in a later response. plus_code (google.maps.places_v1.types.Place.PlusCode): - Output only. Plus code of the place location - lat/long. + Plus code of the place location lat/long. location (google.type.latlng_pb2.LatLng): - Output only. The position of this place. + The position of this place. viewport (google.geo.type.types.Viewport): - Output only. A viewport suitable for - displaying the place on an average-sized map. + A viewport suitable for displaying the place + on an average-sized map. rating (float): - Output only. A rating between 1.0 and 5.0, - based on user reviews of this place. + A rating between 1.0 and 5.0, based on user + reviews of this place. google_maps_uri (str): - Output only. A URL providing more information - about this place. + A URL providing more information about this + place. website_uri (str): - Output only. 
The authoritative website for - this place, e.g. a business' homepage. Note that - for places that are part of a chain (e.g. an - IKEA store), this will usually be the website - for the individual store, not the overall chain. + The authoritative website for this place, + e.g. a business' homepage. Note that for places + that are part of a chain (e.g. an IKEA store), + this will usually be the website for the + individual store, not the overall chain. reviews (MutableSequence[google.maps.places_v1.types.Review]): - Output only. List of reviews about this - place. + List of reviews about this place, sorted by + relevance. regular_opening_hours (google.maps.places_v1.types.Place.OpeningHours): - Output only. The regular hours of operation. + The regular hours of operation. utc_offset_minutes (int): - Output only. Number of minutes this place's - timezone is currently offset from UTC. This is - expressed in minutes to support timezones that - are offset by fractions of an hour, e.g. X hours - and 15 minutes. + Number of minutes this place's timezone is + currently offset from UTC. This is expressed in + minutes to support timezones that are offset by + fractions of an hour, e.g. X hours and 15 + minutes. This field is a member of `oneof`_ ``_utc_offset_minutes``. photos (MutableSequence[google.maps.places_v1.types.Photo]): - Output only. Information (including - references) about photos of this place. + Information (including references) about + photos of this place. adr_format_address (str): - Output only. The place's address in adr - microformat: http://microformats.org/wiki/adr. + The place's address in adr microformat: + http://microformats.org/wiki/adr. business_status (google.maps.places_v1.types.Place.BusinessStatus): - Output only. The business status for the - place. + The business status for the place. price_level (google.maps.places_v1.types.PriceLevel): - Output only. Price level of the place. + Price level of the place. 
attributions (MutableSequence[google.maps.places_v1.types.Place.Attribution]): - Output only. A set of data provider that must - be shown with this result. + A set of data provider that must be shown + with this result. user_rating_count (int): - Output only. The total number of reviews - (with or without text) for this place. + The total number of reviews (with or without + text) for this place. This field is a member of `oneof`_ ``_user_rating_count``. icon_mask_base_uri (str): - Output only. A truncated URL to an v2 icon - mask. User can access different icon type by - appending type suffix to the end (eg, ".svg" or - ".png"). + A truncated URL to an icon mask. User can + access different icon type by appending type + suffix to the end (eg, ".svg" or ".png"). icon_background_color (str): - Output only. Background color for icon_mask in hex format, - e.g. #909CE1. + Background color for icon_mask in hex format, e.g. #909CE1. takeout (bool): - Output only. Specifies if the business - supports takeout. + Specifies if the business supports takeout. This field is a member of `oneof`_ ``_takeout``. delivery (bool): - Output only. Specifies if the business - supports delivery. + Specifies if the business supports delivery. This field is a member of `oneof`_ ``_delivery``. dine_in (bool): - Output only. Specifies if the business - supports indoor or outdoor seating options. + Specifies if the business supports indoor or + outdoor seating options. This field is a member of `oneof`_ ``_dine_in``. curbside_pickup (bool): - Output only. Specifies if the business - supports curbside pickup. + Specifies if the business supports curbside + pickup. This field is a member of `oneof`_ ``_curbside_pickup``. reservable (bool): - Output only. Specifies if the place supports - reservations. + Specifies if the place supports reservations. This field is a member of `oneof`_ ``_reservable``. serves_breakfast (bool): - Output only. Specifies if the place serves - breakfast. 
+ Specifies if the place serves breakfast. This field is a member of `oneof`_ ``_serves_breakfast``. serves_lunch (bool): - Output only. Specifies if the place serves - lunch. + Specifies if the place serves lunch. This field is a member of `oneof`_ ``_serves_lunch``. serves_dinner (bool): - Output only. Specifies if the place serves - dinner. + Specifies if the place serves dinner. This field is a member of `oneof`_ ``_serves_dinner``. serves_beer (bool): - Output only. Specifies if the place serves - beer. + Specifies if the place serves beer. This field is a member of `oneof`_ ``_serves_beer``. serves_wine (bool): - Output only. Specifies if the place serves - wine. + Specifies if the place serves wine. This field is a member of `oneof`_ ``_serves_wine``. serves_brunch (bool): - Output only. Specifies if the place serves - brunch. + Specifies if the place serves brunch. This field is a member of `oneof`_ ``_serves_brunch``. serves_vegetarian_food (bool): - Output only. Specifies if the place serves - vegetarian food. + Specifies if the place serves vegetarian + food. This field is a member of `oneof`_ ``_serves_vegetarian_food``. current_opening_hours (google.maps.places_v1.types.Place.OpeningHours): - Output only. The hours of operation for the next seven days - (including today). The time period starts at midnight on the - date of the request and ends at 11:59 pm six days later. - This field includes the special_days subfield of all hours, - set for dates that have exceptional hours. + The hours of operation for the next seven days (including + today). The time period starts at midnight on the date of + the request and ends at 11:59 pm six days later. This field + includes the special_days subfield of all hours, set for + dates that have exceptional hours. current_secondary_opening_hours (MutableSequence[google.maps.places_v1.types.Place.OpeningHours]): - Output only. 
Contains an array of entries for the next seven - days including information about secondary hours of a - business. Secondary hours are different from a business's - main hours. For example, a restaurant can specify drive - through hours or delivery hours as its secondary hours. This - field populates the type subfield, which draws from a - predefined list of opening hours types (such as - DRIVE_THROUGH, PICKUP, or TAKEOUT) based on the types of the - place. This field includes the special_days subfield of all - hours, set for dates that have exceptional hours. + Contains an array of entries for the next seven days + including information about secondary hours of a business. + Secondary hours are different from a business's main hours. + For example, a restaurant can specify drive through hours or + delivery hours as its secondary hours. This field populates + the type subfield, which draws from a predefined list of + opening hours types (such as DRIVE_THROUGH, PICKUP, or + TAKEOUT) based on the types of the place. This field + includes the special_days subfield of all hours, set for + dates that have exceptional hours. regular_secondary_opening_hours (MutableSequence[google.maps.places_v1.types.Place.OpeningHours]): - Output only. Contains an array of entries for information - about regular secondary hours of a business. Secondary hours - are different from a business's main hours. For example, a - restaurant can specify drive through hours or delivery hours - as its secondary hours. This field populates the type - subfield, which draws from a predefined list of opening - hours types (such as DRIVE_THROUGH, PICKUP, or TAKEOUT) - based on the types of the place. + Contains an array of entries for information about regular + secondary hours of a business. Secondary hours are different + from a business's main hours. For example, a restaurant can + specify drive through hours or delivery hours as its + secondary hours. 
This field populates the type subfield, + which draws from a predefined list of opening hours types + (such as DRIVE_THROUGH, PICKUP, or TAKEOUT) based on the + types of the place. editorial_summary (google.type.localized_text_pb2.LocalizedText): - Output only. Contains a summary of the place. - A summary is comprised of a textual overview, - and also includes the language code for these if + Contains a summary of the place. A summary is + comprised of a textual overview, and also + includes the language code for these if applicable. Summary text must be presented as-is and can not be modified or altered. outdoor_seating (bool): - Output only. Place provides outdoor seating. + Place provides outdoor seating. This field is a member of `oneof`_ ``_outdoor_seating``. live_music (bool): - Output only. Place provides live music. + Place provides live music. This field is a member of `oneof`_ ``_live_music``. menu_for_children (bool): - Output only. Place has a children's menu. + Place has a children's menu. This field is a member of `oneof`_ ``_menu_for_children``. serves_cocktails (bool): - Output only. Place serves cocktails. + Place serves cocktails. This field is a member of `oneof`_ ``_serves_cocktails``. serves_dessert (bool): - Output only. Place serves dessert. + Place serves dessert. This field is a member of `oneof`_ ``_serves_dessert``. serves_coffee (bool): - Output only. Place serves coffee. + Place serves coffee. This field is a member of `oneof`_ ``_serves_coffee``. good_for_children (bool): - Output only. Place is good for children. + Place is good for children. This field is a member of `oneof`_ ``_good_for_children``. allows_dogs (bool): - Output only. Place allows dogs. + Place allows dogs. This field is a member of `oneof`_ ``_allows_dogs``. restroom (bool): - Output only. Place has restroom. + Place has restroom. This field is a member of `oneof`_ ``_restroom``. good_for_groups (bool): - Output only. Place accommodates groups. 
+ Place accommodates groups. This field is a member of `oneof`_ ``_good_for_groups``. good_for_watching_sports (bool): - Output only. Place is suitable for watching - sports. + Place is suitable for watching sports. This field is a member of `oneof`_ ``_good_for_watching_sports``. payment_options (google.maps.places_v1.types.Place.PaymentOptions): - + Payment options the place accepts. If a + payment option data is not available, the + payment option field will be unset. parking_options (google.maps.places_v1.types.Place.ParkingOptions): - Output only. Options of parking provided by - the place. + Options of parking provided by the place. sub_destinations (MutableSequence[google.maps.places_v1.types.Place.SubDestination]): - Output only. A list of sub destinations - related to the place. + A list of sub destinations related to the + place. accessibility_options (google.maps.places_v1.types.Place.AccessibilityOptions): - Output only. Information about the - accessibility options a place offers. + Information about the accessibility options a + place offers. This field is a member of `oneof`_ ``_accessibility_options``. fuel_options (google.maps.places_v1.types.FuelOptions): - Output only. The most recent information - about fuel options in a gas station. This - information is updated regularly. + The most recent information about fuel + options in a gas station. This information is + updated regularly. ev_charge_options (google.maps.places_v1.types.EVChargeOptions): - Output only. Information of ev charging - options. + Information of ev charging options. """ class BusinessStatus(proto.Enum): @@ -338,19 +359,19 @@ class AddressComponent(proto.Message): Attributes: long_text (str): - Output only. The full text description or name of the - address component. For example, an address component for the - country Australia may have a long_name of "Australia". + The full text description or name of the address component. 
+ For example, an address component for the country Australia + may have a long_name of "Australia". short_text (str): - Output only. An abbreviated textual name for the address - component, if available. For example, an address component - for the country of Australia may have a short_name of "AU". + An abbreviated textual name for the address component, if + available. For example, an address component for the country + of Australia may have a short_name of "AU". types (MutableSequence[str]): - Output only. An array indicating the type(s) - of the address component. + An array indicating the type(s) of the + address component. language_code (str): - Output only. The language used to format this - components, in CLDR notation. + The language used to format this components, + in CLDR notation. """ long_text: str = proto.Field( @@ -378,15 +399,14 @@ class PlusCode(proto.Message): Attributes: global_code (str): - Output only. Place's global (full) code, such - as "9FWM33GV+HQ", representing an 1/8000 by - 1/8000 degree area (~14 by 14 meters). + Place's global (full) code, such as + "9FWM33GV+HQ", representing an 1/8000 by 1/8000 + degree area (~14 by 14 meters). compound_code (str): - Output only. Place's compound code, such as - "33GV+HQ, Ramberg, Norway", containing the - suffix of the global code and replacing the - prefix with a formatted name of a reference - entity. + Place's compound code, such as "33GV+HQ, + Ramberg, Norway", containing the suffix of the + global code and replacing the prefix with a + formatted name of a reference entity. """ global_code: str = proto.Field( @@ -405,37 +425,35 @@ class OpeningHours(proto.Message): Attributes: open_now (bool): - Output only. Is this place open right now? - Always present unless we lack time-of-day or - timezone data for these opening hours. + Is this place open right now? Always present + unless we lack time-of-day or timezone data for + these opening hours. This field is a member of `oneof`_ ``_open_now``. 
periods (MutableSequence[google.maps.places_v1.types.Place.OpeningHours.Period]): - Output only. The periods that this place is - open during the week. The periods are in - chronological order, starting with Sunday in the - place-local timezone. An empty (but not absent) - value indicates a place that is never open, e.g. + The periods that this place is open during + the week. The periods are in chronological + order, starting with Sunday in the place-local + timezone. An empty (but not absent) value + indicates a place that is never open, e.g. because it is closed temporarily for renovations. weekday_descriptions (MutableSequence[str]): - Output only. Localized strings describing the - opening hours of this place, one string for each - day of the week. Will be empty if the hours are + Localized strings describing the opening + hours of this place, one string for each day of + the week. Will be empty if the hours are unknown or could not be converted to localized - text. Example: "Sun: - - 18:00–06:00". + text. Example: "Sun: 18:00–06:00". secondary_hours_type (google.maps.places_v1.types.Place.OpeningHours.SecondaryHoursType): - Output only. A type string used to identify - the type of secondary hours. + A type string used to identify the type of + secondary hours. special_days (MutableSequence[google.maps.places_v1.types.Place.OpeningHours.SpecialDay]): - Output only. Structured information for special days that - fall within the period that the returned opening hours - cover. Special days are days that could impact the business - hours of a place, e.g. Christmas day. Set for - current_opening_hours and current_secondary_opening_hours if - there are exceptional hours. + Structured information for special days that fall within the + period that the returned opening hours cover. Special days + are days that could impact the business hours of a place, + e.g. Christmas day. 
Set for current_opening_hours and + current_secondary_opening_hours if there are exceptional + hours. """ class SecondaryHoursType(proto.Enum): @@ -493,11 +511,9 @@ class Period(proto.Message): Attributes: open_ (google.maps.places_v1.types.Place.OpeningHours.Period.Point): - Output only. The time that the place starts - to be open. + The time that the place starts to be open. close (google.maps.places_v1.types.Place.OpeningHours.Period.Point): - Output only. The time that the place starts - to be closed. + The time that the place starts to be closed. """ class Point(proto.Message): @@ -507,31 +523,26 @@ class Point(proto.Message): Attributes: day (int): - Output only. A day of the week, as an integer - in the range 0-6. 0 is Sunday, 1 is Monday, - etc. + A day of the week, as an integer in the range + 0-6. 0 is Sunday, 1 is Monday, etc. This field is a member of `oneof`_ ``_day``. hour (int): - Output only. The hour in 2 digits. Ranges - from 00 to 23. + The hour in 2 digits. Ranges from 00 to 23. This field is a member of `oneof`_ ``_hour``. minute (int): - Output only. The minute in 2 digits. Ranges - from 00 to 59. + The minute in 2 digits. Ranges from 00 to 59. This field is a member of `oneof`_ ``_minute``. date (google.type.date_pb2.Date): - Output only. Date in the local timezone for - the place. + Date in the local timezone for the place. truncated (bool): - Output only. Whether or not this endpoint was truncated. - Truncation occurs when the real hours are outside the times - we are willing to return hours between, so we truncate the - hours back to these boundaries. This ensures that at most 24 - \* 7 hours from midnight of the day of the request are - returned. + Whether or not this endpoint was truncated. Truncation + occurs when the real hours are outside the times we are + willing to return hours between, so we truncate the hours + back to these boundaries. 
This ensures that at most 24 \* 7 + hours from midnight of the day of the request are returned. """ day: int = proto.Field( @@ -578,7 +589,7 @@ class SpecialDay(proto.Message): Attributes: date (google.type.date_pb2.Date): - Output only. The date of this special day. + The date of this special day. """ date: date_pb2.Date = proto.Field( @@ -619,11 +630,9 @@ class Attribution(proto.Message): Attributes: provider (str): - Output only. Name of the Place's data - provider. + Name of the Place's data provider. provider_uri (str): - Output only. URI to the Place's data - provider. + URI to the Place's data provider. """ provider: str = proto.Field( @@ -783,17 +792,44 @@ class AccessibilityOptions(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + wheelchair_accessible_parking (bool): + Place offers wheelchair accessible parking. + + This field is a member of `oneof`_ ``_wheelchair_accessible_parking``. wheelchair_accessible_entrance (bool): Places has wheelchair accessible entrance. This field is a member of `oneof`_ ``_wheelchair_accessible_entrance``. + wheelchair_accessible_restroom (bool): + Place has wheelchair accessible restroom. + + This field is a member of `oneof`_ ``_wheelchair_accessible_restroom``. + wheelchair_accessible_seating (bool): + Place has wheelchair accessible seating. + + This field is a member of `oneof`_ ``_wheelchair_accessible_seating``. 
""" + wheelchair_accessible_parking: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) wheelchair_accessible_entrance: bool = proto.Field( proto.BOOL, number=2, optional=True, ) + wheelchair_accessible_restroom: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + wheelchair_accessible_seating: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) name: str = proto.Field( proto.STRING, @@ -812,6 +848,15 @@ class AccessibilityOptions(proto.Message): proto.STRING, number=5, ) + primary_type: str = proto.Field( + proto.STRING, + number=50, + ) + primary_type_display_name: localized_text_pb2.LocalizedText = proto.Field( + proto.MESSAGE, + number=32, + message=localized_text_pb2.LocalizedText, + ) national_phone_number: str = proto.Field( proto.STRING, number=7, @@ -824,6 +869,10 @@ class AccessibilityOptions(proto.Message): proto.STRING, number=9, ) + short_formatted_address: str = proto.Field( + proto.STRING, + number=51, + ) address_components: MutableSequence[AddressComponent] = proto.RepeatedField( proto.MESSAGE, number=10, diff --git a/packages/google-maps-places/google/maps/places_v1/types/places_service.py b/packages/google-maps-places/google/maps/places_v1/types/places_service.py index faec30184cd1..07e4c7937378 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/places_service.py +++ b/packages/google-maps-places/google/maps/places_v1/types/places_service.py @@ -58,12 +58,16 @@ class SearchNearbyRequest(proto.Message): applicable law. For more information, see - http://www.unicode.org/reports/tr35/#unicode_region_subtag. + https://www.unicode.org/cldr/charts/latest/supplemental/territory_language_information.html. Note that 3-digit region codes are not currently supported. included_types (MutableSequence[str]): Included Place type (eg, "restaurant" or "gas_station") from - https://developers.google.com/places/supported_types. 
+ https://developers.google.com/maps/documentation/places/web-service/place-types. + + Up to 50 types from `Table + A `__ + may be specified. If there are any conflicting types, i.e. a type appears in both included_types and excluded_types, an INVALID_ARGUMENT @@ -73,12 +77,16 @@ class SearchNearbyRequest(proto.Message): restrictions, only places that satisfy all of the restrictions are returned. For example, if we have {included_types = ["restaurant"], excluded_primary_types = - ["restaurant"]}, the returned places are POIs that provide - "restaurant" related services but do not operate primarily - as "restaurants". + ["restaurant"]}, the returned places provide "restaurant" + related services but do not operate primarily as + "restaurants". excluded_types (MutableSequence[str]): Excluded Place type (eg, "restaurant" or "gas_station") from - https://developers.google.com/places/supported_types. + https://developers.google.com/maps/documentation/places/web-service/place-types. + + Up to 50 types from `Table + A `__ + may be specified. If the client provides both included_types (e.g. restaurant) and excluded_types (e.g. cafe), then the response should @@ -94,13 +102,19 @@ class SearchNearbyRequest(proto.Message): restrictions, only places that satisfy all of the restrictions are returned. For example, if we have {included_types = ["restaurant"], excluded_primary_types = - ["restaurant"]}, the returned places are POIs that provide - "restaurant" related services but do not operate primarily - as "restaurants". + ["restaurant"]}, the returned places provide "restaurant" + related services but do not operate primarily as + "restaurants". included_primary_types (MutableSequence[str]): Included primary Place type (e.g. "restaurant" or "gas_station") from - https://developers.google.com/places/supported_types. + https://developers.google.com/maps/documentation/places/web-service/place-types. 
+ A place can only have a single primary type from the + supported types table associated with it. + + Up to 50 types from `Table + A `__ + may be specified. If there are any conflicting primary types, i.e. a type appears in both included_primary_types and @@ -111,13 +125,17 @@ class SearchNearbyRequest(proto.Message): restrictions, only places that satisfy all of the restrictions are returned. For example, if we have {included_types = ["restaurant"], excluded_primary_types = - ["restaurant"]}, the returned places are POIs that provide - "restaurant" related services but do not operate primarily - as "restaurants". + ["restaurant"]}, the returned places provide "restaurant" + related services but do not operate primarily as + "restaurants". excluded_primary_types (MutableSequence[str]): Excluded primary Place type (e.g. "restaurant" or "gas_station") from - https://developers.google.com/places/supported_types. + https://developers.google.com/maps/documentation/places/web-service/place-types. + + Up to 50 types from `Table + A `__ + may be specified. If there are any conflicting primary types, i.e. a type appears in both included_primary_types and @@ -128,15 +146,15 @@ class SearchNearbyRequest(proto.Message): restrictions, only places that satisfy all of the restrictions are returned. For example, if we have {included_types = ["restaurant"], excluded_primary_types = - ["restaurant"]}, the returned places are POIs that provide - "restaurant" related services but do not operate primarily - as "restaurants". + ["restaurant"]}, the returned places provide "restaurant" + related services but do not operate primarily as + "restaurants". max_result_count (int): Maximum number of results to return. It must be between 1 - and 20, inclusively. If the number is unset, it falls back - to the upper limit. If the number is set to negative or - exceeds the upper limit, an INVALID_ARGUMENT error is - returned. + and 20 (default), inclusively. 
If the number is unset, it + falls back to the upper limit. If the number is set to + negative or exceeds the upper limit, an INVALID_ARGUMENT + error is returned. location_restriction (google.maps.places_v1.types.SearchNearbyRequest.LocationRestriction): Required. The region to search. rank_preference (google.maps.places_v1.types.SearchNearbyRequest.RankPreference): @@ -223,9 +241,9 @@ class SearchNearbyResponse(proto.Message): Attributes: places (MutableSequence[google.maps.places_v1.types.Place]): - A list of interesting places that meets - user's requirements like places types, number of - places and specific location restriction. + A list of places that meets user's + requirements like places types, number of places + and specific location restriction. """ places: MutableSequence[place.Place] = proto.RepeatedField( @@ -259,33 +277,32 @@ class SearchTextRequest(proto.Message): applicable law. For more information, see - http://www.unicode.org/reports/tr35/#unicode_region_subtag. + https://www.unicode.org/cldr/charts/latest/supplemental/territory_language_information.html. Note that 3-digit region codes are not currently supported. rank_preference (google.maps.places_v1.types.SearchTextRequest.RankPreference): How results will be ranked in the response. included_type (str): - The requested place type. Full list of types supported: - https://developers.google.com/places/supported_types. Only - support one included type. + The requested place type. Full list of types + supported: + https://developers.google.com/maps/documentation/places/web-service/place-types. + Only support one included type. open_now (bool): - Used to restrict the search to places that are open at a - specific time. open_now marks if a business is currently - open. + Used to restrict the search to places that + are currently open. The default is false. min_rating (float): Filter out results whose average user rating is strictly - less than this limit. 
A valid value must be an float between + less than this limit. A valid value must be a float between 0 and 5 (inclusively) at a 0.5 cadence i.e. [0, 0.5, 1.0, - ... , 5.0] inclusively. This is to keep parity with - LocalRefinement_UserRating. The input rating will round up - to the nearest 0.5(ceiling). For instance, a rating of 0.6 - will eliminate all results with a less than 1.0 rating. + ... , 5.0] inclusively. The input rating will round up to + the nearest 0.5(ceiling). For instance, a rating of 0.6 will + eliminate all results with a less than 1.0 rating. max_result_count (int): Maximum number of results to return. It must be between 1 - and 20, inclusively. If the number is unset, it falls back - to the upper limit. If the number is set to negative or - exceeds the upper limit, an INVALID_ARGUMENT error is - returned. + and 20, inclusively. The default is 20. If the number is + unset, it falls back to the upper limit. If the number is + set to negative or exceeds the upper limit, an + INVALID_ARGUMENT error is returned. price_levels (MutableSequence[google.maps.places_v1.types.PriceLevel]): Used to restrict the search to places that are marked as certain price levels. Users can @@ -316,8 +333,7 @@ class RankPreference(proto.Enum): Ranks results by distance. RELEVANCE (2): Ranks results by relevance. Sort order - determined by normal ranking stack. See - SortRefinement::RELEVANCE. + determined by normal ranking stack. """ RANK_PREFERENCE_UNSPECIFIED = 0 DISTANCE = 1 @@ -336,8 +352,14 @@ class LocationBias(proto.Message): Attributes: rectangle (google.geo.type.types.Viewport): - A rectangle box defined by northeast and - southwest corner. + A rectangle box defined by northeast and southwest corner. + ``rectangle.high()`` must be the northeast point of the + rectangle viewport. ``rectangle.low()`` must be the + southwest point of the rectangle viewport. + ``rectangle.low().latitude()`` cannot be greater than + ``rectangle.high().latitude()``. 
This will result in an + empty latitude range. A rectangle viewport cannot be wider + than 180 degrees. This field is a member of `oneof`_ ``type``. circle (google.maps.places_v1.types.Circle): @@ -368,8 +390,14 @@ class LocationRestriction(proto.Message): Attributes: rectangle (google.geo.type.types.Viewport): - A rectangle box defined by northeast and - southwest corner. + A rectangle box defined by northeast and southwest corner. + ``rectangle.high()`` must be the northeast point of the + rectangle viewport. ``rectangle.low()`` must be the + southwest point of the rectangle viewport. + ``rectangle.low().latitude()`` cannot be greater than + ``rectangle.high().latitude()``. This will result in an + empty latitude range. A rectangle viewport cannot be wider + than 180 degrees. This field is a member of `oneof`_ ``type``. """ @@ -457,9 +485,14 @@ class GetPhotoMediaRequest(proto.Message): Attributes: name (str): - Required. The resource name of a photo. It is returned in - Place's photos.name field. Format: - places//photos//media. + Required. The resource name of a photo media in the format: + ``places/{place_id}/photos/{photo_reference}/media``. + + The resource name of a photo as returned in a Place object's + ``photos.name`` field comes with the format + ``places/{place_id}/photos/{photo_reference}``. You need to + append ``/media`` at the end of the photo resource to get + the photo media resource name. max_width_px (int): Optional. Specifies the maximum desired width, in pixels, of the image. If the image is smaller than the values @@ -493,7 +526,7 @@ class GetPhotoMediaRequest(proto.Message): redirect behavior and render a text format (for example, in JSON format for HTTP use case) response. If not set, an HTTP redirect will be - issued to redirect the call to the image midea. + issued to redirect the call to the image media. This option is ignored for non-HTTP requests. 
""" @@ -520,9 +553,8 @@ class PhotoMedia(proto.Message): Attributes: name (str): - The resource name of a photo. It is returned in Place's - photos.name field. Format: - places//photos//media. + The resource name of a photo media in the format: + ``places/{place_id}/photos/{photo_reference}/media``. photo_uri (str): A short-lived uri that can be used to render the photo. @@ -544,9 +576,9 @@ class GetPlaceRequest(proto.Message): Attributes: name (str): - Required. A place_id returned in a Place (with "places/" + Required. A place ID returned in a Place (with "places/" prefix), or equivalently the name in the same Place. Format: - places/. + ``places/{place_id}``. language_code (str): Optional. Place details will be displayed with the preferred language if available. @@ -560,7 +592,7 @@ class GetPlaceRequest(proto.Message): used to display the place details, like region-specific place name, if available. The parameter can affect results based on applicable law. For more information, see - http://www.unicode.org/reports/tr35/#unicode_region_subtag. + https://www.unicode.org/cldr/charts/latest/supplemental/territory_language_information.html. Note that 3-digit region codes are not currently supported. """ diff --git a/packages/google-maps-places/google/maps/places_v1/types/review.py b/packages/google-maps-places/google/maps/places_v1/types/review.py index b5df7ae538f2..2c13eba6824b 100644 --- a/packages/google-maps-places/google/maps/places_v1/types/review.py +++ b/packages/google-maps-places/google/maps/places_v1/types/review.py @@ -36,28 +36,24 @@ class Review(proto.Message): Attributes: name (str): - Output only. A reference representing this place review - which may be used to look up this place review again (a.k.a. - the API "resource" name: - places/{place_id}/reviews/{review}). + A reference representing this place review which may be used + to look up this place review again (also called the API + "resource" name: ``places/{place_id}/reviews/{review}``). 
relative_publish_time_description (str): - Output only. A string of formatted recent - time, expressing the review time relative to the - current time in a form appropriate for the - language and country. + A string of formatted recent time, expressing + the review time relative to the current time in + a form appropriate for the language and country. text (google.type.localized_text_pb2.LocalizedText): - Output only. The localized text of the - review. + The localized text of the review. original_text (google.type.localized_text_pb2.LocalizedText): - Output only. The review text in its original - language. + The review text in its original language. rating (float): - Output only. A number between 1.0 and 5.0, - a.k.a. the number of stars. + A number between 1.0 and 5.0, also called the + number of stars. author_attribution (google.maps.places_v1.types.AuthorAttribution): - Output only. This review's author. + This review's author. publish_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp for the review. + Timestamp for the review. 
""" name: str = proto.Field( diff --git a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json index 8302a208b6e7..2379a066bd9f 100644 --- a/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json +++ b/packages/google-maps-places/samples/generated_samples/snippet_metadata_google.maps.places.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-maps-places", - "version": "0.1.5" + "version": "0.1.6" }, "snippets": [ { diff --git a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py index f8946f734500..2c8e8323ef47 100644 --- a/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py +++ b/packages/google-maps-places/tests/unit/gapic/places_v1/test_places.py @@ -1094,9 +1094,11 @@ def test_get_place(request_type, transport: str = "grpc"): name="name_value", id="id_value", types=["types_value"], + primary_type="primary_type_value", national_phone_number="national_phone_number_value", international_phone_number="international_phone_number_value", formatted_address="formatted_address_value", + short_formatted_address="short_formatted_address_value", rating=0.645, google_maps_uri="google_maps_uri_value", website_uri="website_uri_value", @@ -1143,9 +1145,11 @@ def test_get_place(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.id == "id_value" assert response.types == ["types_value"] + assert response.primary_type == "primary_type_value" assert response.national_phone_number == "national_phone_number_value" assert response.international_phone_number == "international_phone_number_value" assert response.formatted_address == "formatted_address_value" + assert response.short_formatted_address == "short_formatted_address_value" assert 
math.isclose(response.rating, 0.645, rel_tol=1e-6) assert response.google_maps_uri == "google_maps_uri_value" assert response.website_uri == "website_uri_value" @@ -1218,9 +1222,11 @@ async def test_get_place_async( name="name_value", id="id_value", types=["types_value"], + primary_type="primary_type_value", national_phone_number="national_phone_number_value", international_phone_number="international_phone_number_value", formatted_address="formatted_address_value", + short_formatted_address="short_formatted_address_value", rating=0.645, google_maps_uri="google_maps_uri_value", website_uri="website_uri_value", @@ -1268,9 +1274,11 @@ async def test_get_place_async( assert response.name == "name_value" assert response.id == "id_value" assert response.types == ["types_value"] + assert response.primary_type == "primary_type_value" assert response.national_phone_number == "national_phone_number_value" assert response.international_phone_number == "international_phone_number_value" assert response.formatted_address == "formatted_address_value" + assert response.short_formatted_address == "short_formatted_address_value" assert math.isclose(response.rating, 0.645, rel_tol=1e-6) assert response.google_maps_uri == "google_maps_uri_value" assert response.website_uri == "website_uri_value" @@ -2175,9 +2183,11 @@ def test_get_place_rest(request_type): name="name_value", id="id_value", types=["types_value"], + primary_type="primary_type_value", national_phone_number="national_phone_number_value", international_phone_number="international_phone_number_value", formatted_address="formatted_address_value", + short_formatted_address="short_formatted_address_value", rating=0.645, google_maps_uri="google_maps_uri_value", website_uri="website_uri_value", @@ -2229,9 +2239,11 @@ def test_get_place_rest(request_type): assert response.name == "name_value" assert response.id == "id_value" assert response.types == ["types_value"] + assert response.primary_type == "primary_type_value" assert 
response.national_phone_number == "national_phone_number_value" assert response.international_phone_number == "international_phone_number_value" assert response.formatted_address == "formatted_address_value" + assert response.short_formatted_address == "short_formatted_address_value" assert math.isclose(response.rating, 0.645, rel_tol=1e-6) assert response.google_maps_uri == "google_maps_uri_value" assert response.website_uri == "website_uri_value" diff --git a/packages/google-shopping-css/.OwlBot.yaml b/packages/google-shopping-css/.OwlBot.yaml new file mode 100644 index 000000000000..03e3b9520666 --- /dev/null +++ b/packages/google-shopping-css/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/shopping/css/(v.*)/.*-py + dest: /owl-bot-staging/google-shopping-css/$1 +api-name: google-shopping-css diff --git a/packages/google-shopping-css/.coveragerc b/packages/google-shopping-css/.coveragerc new file mode 100644 index 000000000000..11afeda0e930 --- /dev/null +++ b/packages/google-shopping-css/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/shopping/css/__init__.py + google/shopping/css/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-shopping-css/.flake8 b/packages/google-shopping-css/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-shopping-css/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-shopping-css/.gitignore b/packages/google-shopping-css/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-shopping-css/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-shopping-css/.repo-metadata.json b/packages/google-shopping-css/.repo-metadata.json new file mode 100644 index 000000000000..0e8f0e70bec7 --- /dev/null +++ b/packages/google-shopping-css/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-shopping-css", + "name_pretty": "CSS API", + "api_description": "Programmatically manage your Comparison Shopping Service (CSS) account data at scale.", + "product_documentation": "https://developers.google.com/comparison-shopping-services/api", + "client_documentation": "https://googleapis.dev/python/google-shopping-css/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=826068&template=1564577", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-shopping-css", + "api_id": "css.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "css" +} \ No newline at end of file diff --git 
a/packages/google-shopping-css/CHANGELOG.md b/packages/google-shopping-css/CHANGELOG.md new file mode 100644 index 000000000000..41e7c5f27bbb --- /dev/null +++ b/packages/google-shopping-css/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2023-12-12) + + +### Features + +* add initial files for google.shopping.css.v1 ([#12114](https://github.com/googleapis/google-cloud-python/issues/12114)) ([94e63cb](https://github.com/googleapis/google-cloud-python/commit/94e63cbbfe85b11e5cf38cbaa3511be8833a86f1)) + +## Changelog diff --git a/packages/google-shopping-css/CODE_OF_CONDUCT.md b/packages/google-shopping-css/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-shopping-css/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-shopping-css/CONTRIBUTING.rst b/packages/google-shopping-css/CONTRIBUTING.rst new file mode 100644 index 000000000000..e6e9d1877868 --- /dev/null +++ b/packages/google-shopping-css/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-shopping-css + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-shopping-css/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-shopping-css/LICENSE b/packages/google-shopping-css/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-shopping-css/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-shopping-css/MANIFEST.in b/packages/google-shopping-css/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-shopping-css/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-shopping-css/README.rst b/packages/google-shopping-css/README.rst new file mode 100644 index 000000000000..72ebc30f4cdb --- /dev/null +++ b/packages/google-shopping-css/README.rst @@ -0,0 +1,108 @@ +Python Client for CSS API +========================= + +|preview| |pypi| |versions| + +`CSS API`_: Programmatically manage your Comparison Shopping Service (CSS) account data at scale. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-shopping-css.svg + :target: https://pypi.org/project/google-shopping-css/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-shopping-css.svg + :target: https://pypi.org/project/google-shopping-css/ +.. _CSS API: https://developers.google.com/comparison-shopping-services/api +.. _Client Library Documentation: https://googleapis.dev/python/google-shopping-css/latest +.. _Product Documentation: https://developers.google.com/comparison-shopping-services/api + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the CSS API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the CSS API.: https://developers.google.com/comparison-shopping-services/api +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-shopping-css + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-shopping-css + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for CSS API + to see other available methods on the client. +- Read the `CSS API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _CSS API Product documentation: https://developers.google.com/comparison-shopping-services/api +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-shopping-css/docs/CHANGELOG.md b/packages/google-shopping-css/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-shopping-css/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-shopping-css/docs/README.rst b/packages/google-shopping-css/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-shopping-css/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-shopping-css/docs/_static/custom.css b/packages/google-shopping-css/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-shopping-css/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-shopping-css/docs/_templates/layout.html b/packages/google-shopping-css/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ 
b/packages/google-shopping-css/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-shopping-css/docs/conf.py b/packages/google-shopping-css/docs/conf.py new file mode 100644 index 000000000000..77f5f4ed2b4f --- /dev/null +++ b/packages/google-shopping-css/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-shopping-css documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-shopping-css" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-shopping-css", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". 
+# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-shopping-css-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-shopping-css.tex", + "google-shopping-css Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-shopping-css", + "google-shopping-css Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-shopping-css", + "google-shopping-css Documentation", + author, + "google-shopping-css", + "google-shopping-css Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-shopping-css/docs/css_v1/account_labels_service.rst b/packages/google-shopping-css/docs/css_v1/account_labels_service.rst new file mode 100644 index 000000000000..8738d1d2b3e5 --- /dev/null +++ b/packages/google-shopping-css/docs/css_v1/account_labels_service.rst @@ -0,0 +1,10 @@ +AccountLabelsService +-------------------------------------- + +.. automodule:: google.shopping.css_v1.services.account_labels_service + :members: + :inherited-members: + +.. automodule:: google.shopping.css_v1.services.account_labels_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-css/docs/css_v1/accounts_service.rst b/packages/google-shopping-css/docs/css_v1/accounts_service.rst new file mode 100644 index 000000000000..c749ba01122c --- /dev/null +++ b/packages/google-shopping-css/docs/css_v1/accounts_service.rst @@ -0,0 +1,10 @@ +AccountsService +--------------------------------- + +.. 
automodule:: google.shopping.css_v1.services.accounts_service + :members: + :inherited-members: + +.. automodule:: google.shopping.css_v1.services.accounts_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-css/docs/css_v1/css_product_inputs_service.rst b/packages/google-shopping-css/docs/css_v1/css_product_inputs_service.rst new file mode 100644 index 000000000000..628fe0c9c05b --- /dev/null +++ b/packages/google-shopping-css/docs/css_v1/css_product_inputs_service.rst @@ -0,0 +1,6 @@ +CssProductInputsService +----------------------------------------- + +.. automodule:: google.shopping.css_v1.services.css_product_inputs_service + :members: + :inherited-members: diff --git a/packages/google-shopping-css/docs/css_v1/css_products_service.rst b/packages/google-shopping-css/docs/css_v1/css_products_service.rst new file mode 100644 index 000000000000..53df112bb801 --- /dev/null +++ b/packages/google-shopping-css/docs/css_v1/css_products_service.rst @@ -0,0 +1,10 @@ +CssProductsService +------------------------------------ + +.. automodule:: google.shopping.css_v1.services.css_products_service + :members: + :inherited-members: + +.. automodule:: google.shopping.css_v1.services.css_products_service.pagers + :members: + :inherited-members: diff --git a/packages/google-shopping-css/docs/css_v1/services_.rst b/packages/google-shopping-css/docs/css_v1/services_.rst new file mode 100644 index 000000000000..496beecd39dc --- /dev/null +++ b/packages/google-shopping-css/docs/css_v1/services_.rst @@ -0,0 +1,9 @@ +Services for Google Shopping Css v1 API +======================================= +.. 
toctree:: + :maxdepth: 2 + + account_labels_service + accounts_service + css_product_inputs_service + css_products_service diff --git a/packages/google-shopping-css/docs/css_v1/types_.rst b/packages/google-shopping-css/docs/css_v1/types_.rst new file mode 100644 index 000000000000..165888559c88 --- /dev/null +++ b/packages/google-shopping-css/docs/css_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Shopping Css v1 API +==================================== + +.. automodule:: google.shopping.css_v1.types + :members: + :show-inheritance: diff --git a/packages/google-shopping-css/docs/index.rst b/packages/google-shopping-css/docs/index.rst new file mode 100644 index 000000000000..fd080362a227 --- /dev/null +++ b/packages/google-shopping-css/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + css_v1/services_ + css_v1/types_ + + +Changelog +--------- + +For a list of all ``google-shopping-css`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-shopping-css/docs/multiprocessing.rst b/packages/google-shopping-css/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-shopping-css/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
diff --git a/packages/google-shopping-css/google/shopping/css/__init__.py b/packages/google-shopping-css/google/shopping/css/__init__.py new file mode 100644 index 000000000000..793e6d80b6dd --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css/__init__.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.shopping.css import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.shopping.css_v1.services.account_labels_service.async_client import ( + AccountLabelsServiceAsyncClient, +) +from google.shopping.css_v1.services.account_labels_service.client import ( + AccountLabelsServiceClient, +) +from google.shopping.css_v1.services.accounts_service.async_client import ( + AccountsServiceAsyncClient, +) +from google.shopping.css_v1.services.accounts_service.client import ( + AccountsServiceClient, +) +from google.shopping.css_v1.services.css_product_inputs_service.async_client import ( + CssProductInputsServiceAsyncClient, +) +from google.shopping.css_v1.services.css_product_inputs_service.client import ( + CssProductInputsServiceClient, +) +from google.shopping.css_v1.services.css_products_service.async_client import ( + CssProductsServiceAsyncClient, +) +from google.shopping.css_v1.services.css_products_service.client import ( + CssProductsServiceClient, +) +from google.shopping.css_v1.types.accounts import ( + Account, + 
GetAccountRequest, + ListChildAccountsRequest, + ListChildAccountsResponse, + UpdateAccountLabelsRequest, +) +from google.shopping.css_v1.types.accounts_labels import ( + AccountLabel, + CreateAccountLabelRequest, + DeleteAccountLabelRequest, + ListAccountLabelsRequest, + ListAccountLabelsResponse, + UpdateAccountLabelRequest, +) +from google.shopping.css_v1.types.css_product_common import ( + Attributes, + Certification, + CssProductStatus, + ProductDetail, + ProductDimension, + ProductWeight, +) +from google.shopping.css_v1.types.css_product_inputs import ( + CssProductInput, + DeleteCssProductInputRequest, + InsertCssProductInputRequest, +) +from google.shopping.css_v1.types.css_products import ( + CssProduct, + GetCssProductRequest, + ListCssProductsRequest, + ListCssProductsResponse, +) + +__all__ = ( + "AccountLabelsServiceClient", + "AccountLabelsServiceAsyncClient", + "AccountsServiceClient", + "AccountsServiceAsyncClient", + "CssProductInputsServiceClient", + "CssProductInputsServiceAsyncClient", + "CssProductsServiceClient", + "CssProductsServiceAsyncClient", + "Account", + "GetAccountRequest", + "ListChildAccountsRequest", + "ListChildAccountsResponse", + "UpdateAccountLabelsRequest", + "AccountLabel", + "CreateAccountLabelRequest", + "DeleteAccountLabelRequest", + "ListAccountLabelsRequest", + "ListAccountLabelsResponse", + "UpdateAccountLabelRequest", + "Attributes", + "Certification", + "CssProductStatus", + "ProductDetail", + "ProductDimension", + "ProductWeight", + "CssProductInput", + "DeleteCssProductInputRequest", + "InsertCssProductInputRequest", + "CssProduct", + "GetCssProductRequest", + "ListCssProductsRequest", + "ListCssProductsResponse", +) diff --git a/packages/google-shopping-css/google/shopping/css/gapic_version.py b/packages/google-shopping-css/google/shopping/css/gapic_version.py new file mode 100644 index 000000000000..a7d39deb7a45 --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css/gapic_version.py @@ -0,0 +1,16 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css/py.typed b/packages/google-shopping-css/google/shopping/css/py.typed new file mode 100644 index 000000000000..29c7b3690029 --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-css package uses inline types. diff --git a/packages/google-shopping-css/google/shopping/css_v1/__init__.py b/packages/google-shopping-css/google/shopping/css_v1/__init__.py new file mode 100644 index 000000000000..e5e2f014ca9f --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/__init__.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.shopping.css_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.account_labels_service import ( + AccountLabelsServiceAsyncClient, + AccountLabelsServiceClient, +) +from .services.accounts_service import AccountsServiceAsyncClient, AccountsServiceClient +from .services.css_product_inputs_service import ( + CssProductInputsServiceAsyncClient, + CssProductInputsServiceClient, +) +from .services.css_products_service import ( + CssProductsServiceAsyncClient, + CssProductsServiceClient, +) +from .types.accounts import ( + Account, + GetAccountRequest, + ListChildAccountsRequest, + ListChildAccountsResponse, + UpdateAccountLabelsRequest, +) +from .types.accounts_labels import ( + AccountLabel, + CreateAccountLabelRequest, + DeleteAccountLabelRequest, + ListAccountLabelsRequest, + ListAccountLabelsResponse, + UpdateAccountLabelRequest, +) +from .types.css_product_common import ( + Attributes, + Certification, + CssProductStatus, + ProductDetail, + ProductDimension, + ProductWeight, +) +from .types.css_product_inputs import ( + CssProductInput, + DeleteCssProductInputRequest, + InsertCssProductInputRequest, +) +from .types.css_products import ( + CssProduct, + GetCssProductRequest, + ListCssProductsRequest, + ListCssProductsResponse, +) + +__all__ = ( + "AccountLabelsServiceAsyncClient", + "AccountsServiceAsyncClient", + "CssProductInputsServiceAsyncClient", + "CssProductsServiceAsyncClient", + "Account", + "AccountLabel", + "AccountLabelsServiceClient", + "AccountsServiceClient", + "Attributes", + "Certification", + "CreateAccountLabelRequest", + "CssProduct", + "CssProductInput", + "CssProductInputsServiceClient", + "CssProductStatus", + "CssProductsServiceClient", + "DeleteAccountLabelRequest", + "DeleteCssProductInputRequest", + "GetAccountRequest", + "GetCssProductRequest", + "InsertCssProductInputRequest", + "ListAccountLabelsRequest", + "ListAccountLabelsResponse", + "ListChildAccountsRequest", 
+ "ListChildAccountsResponse", + "ListCssProductsRequest", + "ListCssProductsResponse", + "ProductDetail", + "ProductDimension", + "ProductWeight", + "UpdateAccountLabelRequest", + "UpdateAccountLabelsRequest", +) diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_metadata.json b/packages/google-shopping-css/google/shopping/css_v1/gapic_metadata.json new file mode 100644 index 000000000000..f67d8636033b --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_metadata.json @@ -0,0 +1,250 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.shopping.css_v1", + "protoPackage": "google.shopping.css.v1", + "schema": "1.0", + "services": { + "AccountLabelsService": { + "clients": { + "grpc": { + "libraryClient": "AccountLabelsServiceClient", + "rpcs": { + "CreateAccountLabel": { + "methods": [ + "create_account_label" + ] + }, + "DeleteAccountLabel": { + "methods": [ + "delete_account_label" + ] + }, + "ListAccountLabels": { + "methods": [ + "list_account_labels" + ] + }, + "UpdateAccountLabel": { + "methods": [ + "update_account_label" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AccountLabelsServiceAsyncClient", + "rpcs": { + "CreateAccountLabel": { + "methods": [ + "create_account_label" + ] + }, + "DeleteAccountLabel": { + "methods": [ + "delete_account_label" + ] + }, + "ListAccountLabels": { + "methods": [ + "list_account_labels" + ] + }, + "UpdateAccountLabel": { + "methods": [ + "update_account_label" + ] + } + } + }, + "rest": { + "libraryClient": "AccountLabelsServiceClient", + "rpcs": { + "CreateAccountLabel": { + "methods": [ + "create_account_label" + ] + }, + "DeleteAccountLabel": { + "methods": [ + "delete_account_label" + ] + }, + "ListAccountLabels": { + "methods": [ + "list_account_labels" + ] + }, + "UpdateAccountLabel": { + "methods": [ + "update_account_label" + ] + } + } + } + } + }, + 
"AccountsService": { + "clients": { + "grpc": { + "libraryClient": "AccountsServiceClient", + "rpcs": { + "GetAccount": { + "methods": [ + "get_account" + ] + }, + "ListChildAccounts": { + "methods": [ + "list_child_accounts" + ] + }, + "UpdateLabels": { + "methods": [ + "update_labels" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AccountsServiceAsyncClient", + "rpcs": { + "GetAccount": { + "methods": [ + "get_account" + ] + }, + "ListChildAccounts": { + "methods": [ + "list_child_accounts" + ] + }, + "UpdateLabels": { + "methods": [ + "update_labels" + ] + } + } + }, + "rest": { + "libraryClient": "AccountsServiceClient", + "rpcs": { + "GetAccount": { + "methods": [ + "get_account" + ] + }, + "ListChildAccounts": { + "methods": [ + "list_child_accounts" + ] + }, + "UpdateLabels": { + "methods": [ + "update_labels" + ] + } + } + } + } + }, + "CssProductInputsService": { + "clients": { + "grpc": { + "libraryClient": "CssProductInputsServiceClient", + "rpcs": { + "DeleteCssProductInput": { + "methods": [ + "delete_css_product_input" + ] + }, + "InsertCssProductInput": { + "methods": [ + "insert_css_product_input" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CssProductInputsServiceAsyncClient", + "rpcs": { + "DeleteCssProductInput": { + "methods": [ + "delete_css_product_input" + ] + }, + "InsertCssProductInput": { + "methods": [ + "insert_css_product_input" + ] + } + } + }, + "rest": { + "libraryClient": "CssProductInputsServiceClient", + "rpcs": { + "DeleteCssProductInput": { + "methods": [ + "delete_css_product_input" + ] + }, + "InsertCssProductInput": { + "methods": [ + "insert_css_product_input" + ] + } + } + } + } + }, + "CssProductsService": { + "clients": { + "grpc": { + "libraryClient": "CssProductsServiceClient", + "rpcs": { + "GetCssProduct": { + "methods": [ + "get_css_product" + ] + }, + "ListCssProducts": { + "methods": [ + "list_css_products" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CssProductsServiceAsyncClient", + 
"rpcs": { + "GetCssProduct": { + "methods": [ + "get_css_product" + ] + }, + "ListCssProducts": { + "methods": [ + "list_css_products" + ] + } + } + }, + "rest": { + "libraryClient": "CssProductsServiceClient", + "rpcs": { + "GetCssProduct": { + "methods": [ + "get_css_product" + ] + }, + "ListCssProducts": { + "methods": [ + "list_css_products" + ] + } + } + } + } + } + } +} diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py new file mode 100644 index 000000000000..a7d39deb7a45 --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/py.typed b/packages/google-shopping-css/google/shopping/css_v1/py.typed new file mode 100644 index 000000000000..29c7b3690029 --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-shopping-css package uses inline types. 
diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/__init__.py b/packages/google-shopping-css/google/shopping/css_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/__init__.py b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/__init__.py new file mode 100644 index 000000000000..450eca085f59 --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AccountLabelsServiceAsyncClient +from .client import AccountLabelsServiceClient + +__all__ = ( + "AccountLabelsServiceClient", + "AccountLabelsServiceAsyncClient", +) diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py new file mode 100644 index 000000000000..52ddd0874fb1 --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/async_client.py @@ -0,0 +1,664 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.css_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + +from google.shopping.css_v1.services.account_labels_service import pagers +from google.shopping.css_v1.types import accounts_labels + +from .client import AccountLabelsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AccountLabelsServiceTransport +from .transports.grpc_asyncio import AccountLabelsServiceGrpcAsyncIOTransport + + +class AccountLabelsServiceAsyncClient: + """Manages Merchant Center and CSS accounts labels.""" + + _client: AccountLabelsServiceClient + + DEFAULT_ENDPOINT = AccountLabelsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AccountLabelsServiceClient.DEFAULT_MTLS_ENDPOINT + + account_label_path = staticmethod(AccountLabelsServiceClient.account_label_path) + parse_account_label_path = staticmethod( + AccountLabelsServiceClient.parse_account_label_path + ) + common_billing_account_path = staticmethod( + AccountLabelsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AccountLabelsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AccountLabelsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + 
AccountLabelsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AccountLabelsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AccountLabelsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(AccountLabelsServiceClient.common_project_path) + parse_common_project_path = staticmethod( + AccountLabelsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(AccountLabelsServiceClient.common_location_path) + parse_common_location_path = staticmethod( + AccountLabelsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountLabelsServiceAsyncClient: The constructed client. + """ + return AccountLabelsServiceClient.from_service_account_info.__func__(AccountLabelsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountLabelsServiceAsyncClient: The constructed client. 
+ """ + return AccountLabelsServiceClient.from_service_account_file.__func__(AccountLabelsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AccountLabelsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AccountLabelsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountLabelsServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(AccountLabelsServiceClient).get_transport_class, + type(AccountLabelsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AccountLabelsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the account labels service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AccountLabelsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AccountLabelsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_account_labels( + self, + request: Optional[Union[accounts_labels.ListAccountLabelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAccountLabelsAsyncPager: + r"""Lists the labels assigned to an account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import css_v1 + + async def sample_list_account_labels(): + # Create a client + client = css_v1.AccountLabelsServiceAsyncClient() + + # Initialize request argument(s) + request = css_v1.ListAccountLabelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_account_labels(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.shopping.css_v1.types.ListAccountLabelsRequest, dict]]): + The request object. Request message for the ``ListAccountLabels`` method. + parent (:class:`str`): + Required. The parent account. + Format: accounts/{account} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.css_v1.services.account_labels_service.pagers.ListAccountLabelsAsyncPager: + Response message for the ListAccountLabels method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = accounts_labels.ListAccountLabelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_account_labels, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListAccountLabelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_account_label( + self, + request: Optional[ + Union[accounts_labels.CreateAccountLabelRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + account_label: Optional[accounts_labels.AccountLabel] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts_labels.AccountLabel: + r"""Creates a new label, not assigned to any account. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import css_v1 + + async def sample_create_account_label(): + # Create a client + client = css_v1.AccountLabelsServiceAsyncClient() + + # Initialize request argument(s) + request = css_v1.CreateAccountLabelRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_account_label(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.css_v1.types.CreateAccountLabelRequest, dict]]): + The request object. Request message for the + 'CreateAccountLanel' method. + parent (:class:`str`): + Required. The parent account. + Format: accounts/{account} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + account_label (:class:`google.shopping.css_v1.types.AccountLabel`): + Required. The label to create. 
+ This corresponds to the ``account_label`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.shopping.css_v1.types.AccountLabel: + Label assigned by CSS domain or CSS + group to one of its sub-accounts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, account_label]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = accounts_labels.CreateAccountLabelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if account_label is not None: + request.account_label = account_label + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_account_label, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_account_label( + self, + request: Optional[ + Union[accounts_labels.UpdateAccountLabelRequest, dict] + ] = None, + *, + account_label: Optional[accounts_labels.AccountLabel] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> accounts_labels.AccountLabel: + r"""Updates a label. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import css_v1 + + async def sample_update_account_label(): + # Create a client + client = css_v1.AccountLabelsServiceAsyncClient() + + # Initialize request argument(s) + request = css_v1.UpdateAccountLabelRequest( + ) + + # Make the request + response = await client.update_account_label(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.shopping.css_v1.types.UpdateAccountLabelRequest, dict]]): + The request object. Request message for the ``UpdateAccountLabel`` method. + account_label (:class:`google.shopping.css_v1.types.AccountLabel`): + Required. The updated label. All + fields must be provided. + + This corresponds to the ``account_label`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.shopping.css_v1.types.AccountLabel: + Label assigned by CSS domain or CSS + group to one of its sub-accounts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([account_label]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = accounts_labels.UpdateAccountLabelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if account_label is not None: + request.account_label = account_label + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_account_label, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("account_label.name", request.account_label.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_account_label( + self, + request: Optional[ + Union[accounts_labels.DeleteAccountLabelRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a label and removes it from all accounts to + which it was assigned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.shopping import css_v1 + + async def sample_delete_account_label(): + # Create a client + client = css_v1.AccountLabelsServiceAsyncClient() + + # Initialize request argument(s) + request = css_v1.DeleteAccountLabelRequest( + name="name_value", + ) + + # Make the request + await client.delete_account_label(request=request) + + Args: + request (Optional[Union[google.shopping.css_v1.types.DeleteAccountLabelRequest, dict]]): + The request object. Request message for the + 'DeleteAccountLabel' method. + name (:class:`str`): + Required. The name of the label to + delete. Format: + accounts/{account}/labels/{label} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = accounts_labels.DeleteAccountLabelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_account_label, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "AccountLabelsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AccountLabelsServiceAsyncClient",) diff --git a/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py new file mode 100644 index 000000000000..1d3050f21cab --- /dev/null +++ b/packages/google-shopping-css/google/shopping/css_v1/services/account_labels_service/client.py @@ -0,0 +1,883 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.shopping.css_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.shopping.css_v1.services.account_labels_service import pagers +from google.shopping.css_v1.types import accounts_labels + +from .transports.base import DEFAULT_CLIENT_INFO, AccountLabelsServiceTransport +from .transports.grpc import AccountLabelsServiceGrpcTransport +from .transports.grpc_asyncio import AccountLabelsServiceGrpcAsyncIOTransport +from .transports.rest import AccountLabelsServiceRestTransport + + +class AccountLabelsServiceClientMeta(type): + """Metaclass for the AccountLabelsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AccountLabelsServiceTransport]] + _transport_registry["grpc"] = AccountLabelsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AccountLabelsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AccountLabelsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AccountLabelsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AccountLabelsServiceClient(metaclass=AccountLabelsServiceClientMeta): + """Manages Merchant Center and CSS accounts labels.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "css.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountLabelsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccountLabelsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AccountLabelsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AccountLabelsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def account_label_path( + account: str, + label: str, + ) -> str: + """Returns a fully-qualified account_label string.""" + return "accounts/{account}/labels/{label}".format( + account=account, + label=label, + ) + + @staticmethod + def parse_account_label_path(path: str) -> Dict[str, str]: + """Parses a account_label path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)/labels/(?P